├── .github └── workflows │ ├── ci.yml │ └── deny.yml ├── .gitignore ├── Cargo.toml ├── README.md ├── Rustfmt.toml ├── cli ├── Cargo.toml └── src │ ├── lib.rs │ ├── main.rs │ └── shell.rs ├── client └── Cargo.toml ├── core ├── Cargo.toml ├── src │ ├── distill_signal.rs │ ├── importer_context.rs │ ├── lib.rs │ └── utils.rs └── tests │ └── uuid.rs ├── daemon ├── Cargo.toml ├── src │ ├── artifact_cache.rs │ ├── asset_hub.rs │ ├── asset_hub_service.rs │ ├── capnp_db.rs │ ├── daemon.rs │ ├── error.rs │ ├── extension_map.rs │ ├── file_asset_source.rs │ ├── file_tracker.rs │ ├── lib.rs │ ├── serialized_asset.rs │ ├── source_pair_import.rs │ ├── timeout.rs │ ├── watcher.rs │ └── websocket_async_io.rs └── tests │ └── file_tracker │ └── test.txt ├── deny.toml ├── docs ├── graphics │ ├── build.dot │ ├── build.svg │ ├── import.dot │ ├── import.svg │ ├── overview.dot │ ├── overview.svg │ ├── pipeline_example.dot │ ├── pipeline_example.svg │ ├── processing_example_meshopt.dot │ ├── processing_example_meshopt.svg │ ├── processing_example_spritesheet.dot │ ├── processing_example_spritesheet.svg │ └── ux │ │ ├── pipeline_ux_meshopt.dot │ │ ├── pipeline_ux_meshopt.svg │ │ ├── pipeline_ux_spritesheet_gen_with_list.dot │ │ ├── pipeline_ux_spritesheet_gen_with_list.svg │ │ ├── pipeline_ux_tex_combine.dot │ │ ├── pipeline_ux_tex_combine.svg │ │ ├── pipeline_ux_tex_srgb_simple.dot │ │ └── pipeline_ux_text_srgb_simple.svg ├── rfc │ ├── assetid.md │ └── overview.md └── source_overview.md ├── examples ├── daemon_with_loader │ ├── Cargo.toml │ ├── assets │ │ ├── amethyst.png │ │ └── amethyst.png.meta │ └── src │ │ ├── game.rs │ │ ├── image.rs │ │ └── main.rs └── handle_integration │ ├── Cargo.toml │ ├── assets │ ├── amethyst.png │ ├── amethyst.png.meta │ ├── custom_asset.ron │ └── custom_asset.ron.meta │ └── src │ ├── custom_asset.rs │ ├── game.rs │ ├── image.rs │ ├── main.rs │ └── storage.rs ├── importer ├── Cargo.toml ├── serde-importable-derive │ ├── Cargo.toml │ └── src │ │ └── lib.rs 
└── src │ ├── boxed_importer.rs │ ├── error.rs │ ├── lib.rs │ ├── ron_importer.rs │ ├── serde_obj.rs │ └── serialized_asset.rs ├── loader ├── Cargo.toml └── src │ ├── handle.rs │ ├── io.rs │ ├── lib.rs │ ├── loader.rs │ ├── packfile_io.rs │ ├── rpc_io.rs │ ├── storage.rs │ └── task_local.rs ├── processing ├── Cargo.toml └── src │ ├── graph.rs │ ├── lib.rs │ └── processor.rs ├── schema ├── Cargo.toml ├── schema-gen │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── schemas │ ├── data.capnp │ ├── pack.capnp │ └── service.capnp └── src │ ├── lib.rs │ └── schemas │ ├── data_capnp.rs │ ├── mod.rs │ ├── pack_capnp.rs │ └── service_capnp.rs ├── src └── lib.rs └── tests └── assets ├── asset.txt ├── asset.txt.meta ├── asset_a.txt ├── asset_a.txt.meta ├── asset_b.txt ├── asset_b.txt.meta ├── asset_c.txt ├── asset_c.txt.meta ├── asset_d.txt └── asset_d.txt.meta /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | env: 10 | RUSTFLAGS: -Cdebuginfo=0 -Dwarnings 11 | CARGO_TERM_COLOR: always 12 | CARGO_INCREMENTAL: 0 13 | 14 | jobs: 15 | tests: 16 | name: Lint and Tests 17 | runs-on: ${{ matrix.os }} 18 | continue-on-error: ${{ matrix.toolchain == 'nightly' }} 19 | strategy: 20 | fail-fast: true 21 | matrix: 22 | os: [macos-latest, windows-latest, ubuntu-latest] 23 | toolchain: [stable, nightly] 24 | steps: 25 | - uses: actions/checkout@v2 26 | 27 | - uses: actions-rs/toolchain@v1 28 | with: 29 | toolchain: ${{ matrix.toolchain }} 30 | profile: minimal 31 | override: true 32 | 33 | - run: rustup component add rustfmt 34 | if: matrix.toolchain == 'stable' && matrix.os == 'ubuntu-latest' 35 | 36 | - run: cargo fmt --all -- --check 37 | if: matrix.toolchain == 'stable' && matrix.os == 'ubuntu-latest' 38 | 39 | - run: rustup component add clippy 40 | if: matrix.toolchain == 'stable' && matrix.os == 'ubuntu-latest' 41 | 42 | - run: cargo +stable 
clippy --workspace --all-targets --all-features 43 | if: matrix.toolchain == 'stable' && matrix.os == 'ubuntu-latest' 44 | 45 | #TODO: macOS has tests are failing in github CI, but are not reproducing elsewhere 46 | - run: cargo test --workspace --all-features -- --nocapture --test-threads=1 47 | if: matrix.toolchain == 'stable' 48 | env: 49 | RUST_LOG: TRACE 50 | -------------------------------------------------------------------------------- /.github/workflows/deny.yml: -------------------------------------------------------------------------------- 1 | name: cargo-deny 2 | 3 | on: [pull_request] 4 | 5 | env: 6 | CARGO_TERM_COLOR: always 7 | 8 | jobs: 9 | cargo-deny: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | checks: 14 | - advisories 15 | - bans licenses sources 16 | 17 | # Prevent sudden announcement of a new advisory from failing ci: 18 | continue-on-error: ${{ matrix.checks == 'advisories' }} 19 | 20 | steps: 21 | - uses: actions/checkout@v2 22 | - uses: EmbarkStudios/cargo-deny-action@v1 23 | with: 24 | command: check ${{ matrix.checks }} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | /assets 3 | 4 | .assets_db 5 | */tests/assets_db 6 | *.mdb 7 | 8 | Cargo.lock 9 | 10 | .DS_Store 11 | .idea/ 12 | .vscode/ 13 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "distill" 3 | version = "0.0.3" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | description = "Asset framework for game engines & editor suites." 
7 | exclude = ["examples/*"] 8 | keywords = ["game", "engine", "assets", "loading", "pipeline"] 9 | categories = ["game-engines"] 10 | 11 | repository = "https://github.com/amethyst/distill" 12 | 13 | readme = "README.md" 14 | license = "MIT OR Apache-2.0" 15 | 16 | [workspace] 17 | members = [ 18 | "importer", 19 | "loader", 20 | "schema", 21 | "cli", 22 | "daemon", 23 | "core", 24 | "examples/daemon_with_loader", 25 | "examples/handle_integration", 26 | ] 27 | 28 | [dependencies] 29 | distill-core = { version = "=0.0.3", path = "core", optional = true } 30 | distill-daemon = { version = "=0.0.3", path = "daemon", optional = true } 31 | distill-importer = { version = "=0.0.3", path = "importer", optional = true } 32 | distill-loader = { version = "=0.0.3", path = "loader", optional = true } 33 | 34 | [dev-dependencies] 35 | futures = "0.3" 36 | serde = "1" 37 | uuid = "0.8.2" 38 | serial_test = "0.5.1" 39 | 40 | [features] 41 | default = ["distill-core", "distill-loader", "rpc_io", "handle", "packfile_io"] 42 | serde-1 = ["distill-core/serde"] 43 | type_uuid = ["distill-core/type_uuid"] 44 | serde_importers = ["distill-importer/serde_importers"] 45 | parallel_hash = ["distill-daemon/parallel_hash"] 46 | pretty_log = ["distill-daemon/pretty_log"] 47 | rpc_io = ["distill-loader/rpc_io"] 48 | packfile_io = ["distill-loader/packfile_io"] 49 | handle = ["distill-loader/handle"] -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Rust](https://github.com/amethyst/distill/workflows/CI/badge.svg)](https://github.com/amethyst/distill/actions) 2 | 3 | # Distill 4 | Distill is an asset pipeline for games, reading artist-friendly formats from disk, processing them into your engine-ready formats, and delivering them to your game runtime. 
Distill handles dependencies between assets, import & build caching, cross-device hot reloading during development, packing assets for a shippable game build, and more. 5 | 6 | # Vision 7 | To create an open-source go-to solution for asset processing in games. 8 | 9 | # Features 10 | The project contains a number of different components, and some can be used independently of others. You can combine them in different ways to tailor them to your workflow. Checkmarks indicate feature support - some features are dreamed up but not implemented. 11 | 12 | ## Daemon 13 | The daemon watches for filesystem events, imports source files to produce assets, manages metadata and serves asset load requests. It is primarily intended for use during development, but can also be used in a distributed game if appropriate. The daemon is very resource efficient and only does work when either a file changes or work is requested. Other components interact with the daemon through a transport-agnostic RPC protocol. 14 | 15 |
Asset UUIDs & Dependency Graphs

Every asset is identified by a 16-byte UUID that is generated when a source file is imported for the first time. Importers also produce an asset's build and load dependencies in terms of UUIDs which can be used to efficiently traverse the dependency graph of an asset without touching the filesystem.

16 |
Source file change detection

The daemon watches for filesystem changes and ensures source files are only imported when they change. Metadata and hashes are indexed locally in LMDB and version controlled in .meta files. Filesystem modification time and hashes are used to reduce redundant imports across your whole team to the greatest extent possible.

17 |
Import Caching

Assets imported from a source file are cached by a hash of their source file content and the asset's ID, avoiding expensive parsing and disk operations.

18 |
Asset Change Log

Asset metadata is maintained in LMDB, a transactional database. The database's consistency guarantees and snapshot support provide a way to synchronize external data stores with the current state of the asset metadata using the Asset Change Log.

19 |
Metadata Tracking & Caching

When assets are imported from source files, metadata is generated and stored in `.meta` files together with the source file, as well as cached in a database. Commit these to version control along with your source files.

20 |
Move & Rename Source Files Confidently

Since metadata is stored with the source file and UUIDs are used to identify individual assets, users can move, rename and share source files with others without breaking references between assets.

21 |
Bring Your Own Asset Types

Asset types are not included in this project. You define your own asset types and source file formats by implementing the `Importer` trait and registering these with a file extension. The Daemon will automatically run your `Importer` for files with the registered extension as required. All asset types must implement `serde::Serialize` + `serde::Deserialize` + `TypeUuidDynamic` + `Send`.

22 |
RON Importer - *OPTIONAL*

An optional Importer and derive macro is included to simplify usage of serialized Rust types as source files using `serde`. 23 | 24 | Type definition: 25 | ```rust 26 | #[derive(Serialize, Deserialize, TypeUuid, SerdeImportable)] 27 | #[uuid = "fab4249b-f95d-411d-a017-7549df090a4f"] 28 | pub struct CustomAsset { 29 | pub cool_string: String, 30 | pub handle_from_path: Handle, 31 | pub handle_from_uuid: Handle, 32 | } 33 | ``` 34 | `custom_asset.ron`: 35 | ``` 36 | { 37 | "fab4249b-f95d-411d-a017-7549df090a4f": 38 | ( 39 | cool_string: "thanks", 40 | // This references an asset from a file in the same directory called "amethyst.png" 41 | handle_from_path: "amethyst.png", 42 | // This references an asset with a UUID (see associated .meta file for an asset's UUID) 43 | handle_from_uuid: "6c5ae1ad-ae30-471b-985b-7d017265f19f" 44 | ) 45 | } 46 | ``` 47 | 48 | 49 |

50 | 51 | 52 | 53 | ## Loader 54 | The Loader module loads assets and their dependencies for a user-implemented `AssetStorage` trait to handle. Loader supports a pluggable `LoaderIO` trait for customizing where assets and their metadata are loaded from. 55 |
Hot Reloading

The built-in `RpcIO` implementation of `LoaderIO` talks to the `Daemon` and automatically reloads assets when an asset has changed.

56 |
Automatic Loading of Dependencies

When a source file is imported and an asset is produced, dependencies are gathered for the asset and saved as metadata. The Loader automatically ensures that dependencies are loaded before the asset is loaded, and that dependencies are unloaded when they are no longer needed.

57 |
`serde` Support for Handles 🎉💯

An optional Handle type is provided with support for deserialization and serialization using `serde`. Handles can be deserialized as either a UUID or a path.

58 |
Automatic Registration of Handle Dependencies 🎉💯

Handle references that are serialized as part of an asset are automatically registered and the referenced assets are guaranteed to be loaded by the Loader before the depending asset is loaded. This means Handles in assets are always guaranteed to be valid and loaded.

59 |
Packing for distribution

To distribute your game, you will want to pack assets into files with enough metadata to load them quickly. The CLI supports packing assets into a file format which the `PackfileIO` implementation supports loading.

60 | 61 | 62 | ## TODO 63 |
Networked artifact caching

Results of imports and builds can be re-used across your whole team using a networked cache server.

64 |
Platform-specific builds

Provide customized build parameters when building an asset and tailor the build artifact for a specific platform.

65 |
Scalable build pipeline

Once assets are imported from sources, the build system aims to be completely pure in the functional programming sense. Inputs to asset builds are all known and declared in the import step. This design enables parallelizable and even distributed builds.

66 |
Searching

Search tags can be produced at import and are automatically indexed by tantivy, which enables super-fast text search. The search index is incrementally maintained by subscribing to the Asset Change Log.

67 | 68 | # Cross-Platform Support 69 | The project aims to support as many platforms as possible with the `Loader` module, while the `Daemon` may never be able to run on platforms without solid filesystem support such as WASM. 70 | Current known supported platforms: 71 |

Linux/Mac/Windows: Loader + Daemon

72 |

iOS: Loader

73 | 74 | # Examples 75 | To run: 76 | - `cd examples/handle_integration` 77 | - `cargo run` 78 | - The example includes an image asset type, so try to put some images (png, jpg, tga) in the `assets` folder! 79 | 80 | Have a look at the generated `.meta` files in the `assets` folder! 81 | 82 | # Get involved 83 | This project is primarily used by [Amethyst](https://github.com/amethyst/amethyst) and casual communication around development happens in the #engine-general channel of the [Amethyst Discord server](https://discord.gg/amethyst). Feel free to drop by for a chat. Contributions or questions are very welcome! 84 | 85 | ### Contribution 86 | 87 | Unless you explicitly state otherwise, any contribution intentionally 88 | submitted for inclusion in the work by you, as defined in the Apache-2.0 89 | license, shall be dual licensed as above, without any additional terms or 90 | conditions. 91 | 92 | See [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT). 93 | 94 | ## License 95 | 96 | Licensed under either of 97 | 98 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) 99 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) 100 | 101 | at your option. 102 | 103 | PLEASE NOTE that some dependencies may be licensed under other terms. These are listed in [deny.toml](deny.toml) under licenses.exceptions on a best-effort basis, and are validated in every CI run using [cargo-deny](https://github.com/EmbarkStudios/cargo-deny). 104 | 105 | ## Vendored Code 106 | 107 | In addition to crate dependencies, this project contains some vendored code: 108 | * [daemon/src/timeout.rs](daemon/src/timeout.rs) - Used under Apache 2.0/MIT license. 
(Only used in unit tests) 109 | -------------------------------------------------------------------------------- /Rustfmt.toml: -------------------------------------------------------------------------------- 1 | reorder_imports = true -------------------------------------------------------------------------------- /cli/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "distill-cli" 3 | version = "0.0.3" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | license = "MIT OR Apache-2.0" 7 | description = "CLI component of `distill`." 8 | publish = false 9 | 10 | [dependencies] 11 | distill-schema = { version = "=0.0.3", path = "../schema" } 12 | 13 | capnp = "0.14.0" 14 | capnp-rpc = "0.14.0" 15 | futures = { version = "0.3", default-features = false, features = ["std", "async-await"] } 16 | uuid = "0.8.2" 17 | async-trait = "0.1.22" 18 | crossterm = { version = "0.17", features = ["event-stream"] } 19 | defer = "0.1.0" 20 | async-io = "1.4.1" 21 | async-net = "1.6.0" 22 | async-executor = "1.4.1" -------------------------------------------------------------------------------- /cli/src/main.rs: -------------------------------------------------------------------------------- 1 | use distill_cli::{shell::Shell, *}; 2 | 3 | pub fn main() -> Result<(), Box> { 4 | let local = async_executor::LocalExecutor::new(); 5 | async_io::block_on(local.run(async_main(&local))) 6 | } 7 | 8 | async fn async_main( 9 | local: &async_executor::LocalExecutor<'_>, 10 | ) -> Result<(), Box> { 11 | let ctx = create_context(local).await?; 12 | 13 | let mut shell = Shell::new(ctx); 14 | 15 | shell.register_command("pack", CmdPack); 16 | shell.register_command("show_all", CmdShowAll); 17 | shell.register_command("get", CmdGet); 18 | shell.register_command("build", CmdBuild); 19 | shell.register_command("path_for_asset", CmdPathForAsset); 20 | shell.register_command("assets_for_path", CmdAssetsForPath); 21 | 22 | 
shell.run_repl().await 23 | } 24 | -------------------------------------------------------------------------------- /client/Cargo.toml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amethyst/distill/852e48272ab3dad76c7490fce8df52461f94ebc0/client/Cargo.toml -------------------------------------------------------------------------------- /core/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "distill-core" 3 | version = "0.0.3" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | license = "MIT OR Apache-2.0" 7 | description = "Core types and utils for `distill`" 8 | 9 | [features] 10 | serde-1 = ["serde"] 11 | type_uuid = ["type-uuid"] 12 | path_utils = ["dunce", "path-clean", "path-slash"] 13 | 14 | [dependencies] 15 | uuid = { version = "0.8.2", features = ["v4"] } 16 | serde = { version = "1", optional = true, features = ["derive"] } 17 | futures-core = { version = "0.3", default-features = false, features = ["alloc"] } 18 | futures = { version = "0.3", default-features = false, features = ["std", "async-await"] } 19 | type-uuid = { version = "0.1.2", optional = true, default-features = false } 20 | dunce = { version = "1.0", optional = true } 21 | path-clean = { version = "0.1", optional = true } 22 | path-slash = { version = "0.1.1", optional = true } 23 | 24 | [target.'cfg(target_arch = "wasm32")'.dependencies] 25 | getrandom = { version = "0.2", features = ["js"] } 26 | 27 | [dev-dependencies] 28 | serde_json = "1.0" 29 | bincode = "1.3.1" 30 | -------------------------------------------------------------------------------- /core/src/distill_signal.rs: -------------------------------------------------------------------------------- 1 | use std::future::Future; 2 | use std::pin::Pin; 3 | use std::task::{Context, Poll}; 4 | 5 | /// Thin wrapper around `futures::channel::oneshot` to match `tokio::sync::oneshot` 
interface. 6 | pub fn oneshot() -> (Sender, Receiver) { 7 | let (sender, receiver) = futures::channel::oneshot::channel(); 8 | (Sender::new(sender), Receiver::new(receiver)) 9 | } 10 | 11 | #[derive(Debug)] 12 | pub struct Receiver { 13 | inner: futures::channel::oneshot::Receiver, 14 | } 15 | 16 | impl Receiver { 17 | #[inline(always)] 18 | pub(crate) fn new(inner: futures::channel::oneshot::Receiver) -> Self { 19 | Receiver { inner } 20 | } 21 | 22 | #[inline] 23 | pub fn try_recv(&mut self) -> Result { 24 | match self.inner.try_recv() { 25 | Ok(Some(x)) => Ok(x), 26 | Ok(None) => Err(TryRecvError::Empty), 27 | Err(_canceled) => Err(TryRecvError::Closed), 28 | } 29 | } 30 | } 31 | 32 | impl Future for Receiver { 33 | type Output = Result; 34 | 35 | fn poll(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll> { 36 | match self.try_recv() { 37 | Ok(value) => Poll::Ready(Ok(value)), 38 | Err(TryRecvError::Closed) => Poll::Ready(Err(RecvError { 0: () })), 39 | Err(TryRecvError::Empty) => Poll::Pending, 40 | } 41 | } 42 | } 43 | 44 | #[derive(Debug)] 45 | pub struct Sender { 46 | inner: futures::channel::oneshot::Sender, 47 | } 48 | 49 | impl Sender { 50 | #[inline(always)] 51 | pub(crate) fn new(inner: futures::channel::oneshot::Sender) -> Self { 52 | Sender { inner } 53 | } 54 | 55 | #[inline] 56 | pub fn send(self, value: T) -> Result<(), RecvError> { 57 | match self.inner.send(value) { 58 | Ok(_) => Ok(()), 59 | Err(_) => Err(RecvError { 0: () }), 60 | } 61 | } 62 | } 63 | 64 | use self::error::*; 65 | pub mod error { 66 | use std::fmt; 67 | 68 | #[derive(Debug, Eq, PartialEq)] 69 | pub struct RecvError(pub(super) ()); 70 | 71 | #[derive(Debug, Eq, PartialEq)] 72 | pub enum TryRecvError { 73 | Empty, 74 | Closed, 75 | } 76 | 77 | impl fmt::Display for TryRecvError { 78 | fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { 79 | match self { 80 | TryRecvError::Empty => write!(fmt, "channel empty"), 81 | TryRecvError::Closed => write!(fmt, "channel 
closed"), 82 | } 83 | } 84 | } 85 | 86 | impl std::error::Error for TryRecvError {} 87 | } 88 | -------------------------------------------------------------------------------- /core/src/importer_context.rs: -------------------------------------------------------------------------------- 1 | use futures_core::future::BoxFuture; 2 | 3 | use crate::{AssetRef, AssetUuid}; 4 | 5 | pub trait ImporterContextHandle: Send + Sync { 6 | fn scope<'a>(&'a self, fut: BoxFuture<'a, ()>) -> BoxFuture<'a, ()>; 7 | 8 | fn begin_serialize_asset(&mut self, asset: AssetUuid); 9 | /// Returns any registered dependencies 10 | fn end_serialize_asset(&mut self, asset: AssetUuid) -> std::collections::HashSet; 11 | /// Resolves an AssetRef to a specific AssetUuid 12 | fn resolve_ref(&mut self, asset_ref: &AssetRef, asset: AssetUuid); 13 | } 14 | 15 | pub trait ImporterContext: 'static + Send + Sync { 16 | fn handle(&self) -> Box; 17 | } 18 | -------------------------------------------------------------------------------- /core/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![deny( 2 | rust_2018_compatibility, 3 | rust_2018_idioms, 4 | unused, 5 | unused_extern_crates, 6 | future_incompatible, 7 | nonstandard_style 8 | )] 9 | 10 | use std::fmt; 11 | #[cfg(feature = "serde-1")] 12 | use std::str::FromStr; 13 | 14 | #[cfg(feature = "serde-1")] 15 | use serde::{ 16 | de::{self, Visitor}, 17 | Deserialize, Deserializer, Serialize, Serializer, 18 | }; 19 | pub use uuid; 20 | use uuid::Uuid; 21 | 22 | pub mod importer_context; 23 | pub mod utils; 24 | 25 | pub mod distill_signal; 26 | 27 | /// A universally unique identifier for an asset. 28 | /// An asset can be a value of any Rust type that implements 29 | /// [`TypeUuidDynamic`] + [serde::Serialize] + [Send]. 30 | /// 31 | /// If using a human-readable format, serializes to a hyphenated UUID format and deserializes from 32 | /// any format supported by the `uuid` crate. 
Otherwise, serializes to and from a `[u8; 16]`. 33 | #[derive(PartialEq, Eq, Clone, Copy, Default, Hash, Ord, PartialOrd)] 34 | pub struct AssetUuid(pub [u8; 16]); 35 | 36 | impl> From for AssetUuid { 37 | fn from(s: S) -> Self { 38 | AssetUuid( 39 | *Uuid::parse_str(s.as_ref()) 40 | .expect("Macro input is not a UUID string") 41 | .as_bytes(), 42 | ) 43 | } 44 | } 45 | 46 | impl AsMut<[u8]> for AssetUuid { 47 | fn as_mut(&mut self) -> &mut [u8] { 48 | &mut self.0 49 | } 50 | } 51 | 52 | impl AsRef<[u8]> for AssetUuid { 53 | fn as_ref(&self) -> &[u8] { 54 | &self.0 55 | } 56 | } 57 | 58 | impl fmt::Debug for AssetUuid { 59 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 60 | f.debug_tuple("AssetUuid") 61 | .field(&uuid::Uuid::from_bytes(self.0)) 62 | .finish() 63 | } 64 | } 65 | 66 | impl fmt::Display for AssetUuid { 67 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 68 | uuid::Uuid::from_bytes(self.0).fmt(f) 69 | } 70 | } 71 | 72 | #[cfg(feature = "serde-1")] 73 | impl Serialize for AssetUuid { 74 | fn serialize(&self, serializer: S) -> Result { 75 | if serializer.is_human_readable() { 76 | serializer.serialize_str(&self.to_string()) 77 | } else { 78 | self.0.serialize(serializer) 79 | } 80 | } 81 | } 82 | 83 | #[cfg(feature = "serde-1")] 84 | struct AssetUuidVisitor; 85 | 86 | #[cfg(feature = "serde-1")] 87 | impl<'a> Visitor<'a> for AssetUuidVisitor { 88 | type Value = AssetUuid; 89 | 90 | fn expecting(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { 91 | write!(fmt, "a UUID-formatted string") 92 | } 93 | 94 | fn visit_str(self, s: &str) -> Result { 95 | uuid::Uuid::from_str(s) 96 | .map(|id| AssetUuid(*id.as_bytes())) 97 | .map_err(|_| de::Error::invalid_value(de::Unexpected::Str(s), &self)) 98 | } 99 | } 100 | 101 | #[cfg(feature = "serde-1")] 102 | impl<'de> Deserialize<'de> for AssetUuid { 103 | fn deserialize>(deserializer: D) -> Result { 104 | if deserializer.is_human_readable() { 105 | 
deserializer.deserialize_string(AssetUuidVisitor) 106 | } else { 107 | Ok(AssetUuid(<[u8; 16]>::deserialize(deserializer)?)) 108 | } 109 | } 110 | } 111 | 112 | /// UUID of an asset's Rust type. Produced by [`TypeUuidDynamic::uuid`]. 113 | /// 114 | /// If using a human-readable format, serializes to a hyphenated UUID format and deserializes from 115 | /// any format supported by the `uuid` crate. Otherwise, serializes to and from a `[u8; 16]`. 116 | #[derive(PartialEq, Eq, Debug, Clone, Copy, Default, Hash)] 117 | pub struct AssetTypeId(pub [u8; 16]); 118 | 119 | impl AsMut<[u8]> for AssetTypeId { 120 | fn as_mut(&mut self) -> &mut [u8] { 121 | &mut self.0 122 | } 123 | } 124 | 125 | impl AsRef<[u8]> for AssetTypeId { 126 | fn as_ref(&self) -> &[u8] { 127 | &self.0 128 | } 129 | } 130 | 131 | impl fmt::Display for AssetTypeId { 132 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 133 | uuid::Uuid::from_bytes(self.0).fmt(f) 134 | } 135 | } 136 | 137 | #[cfg(feature = "serde-1")] 138 | impl Serialize for AssetTypeId { 139 | fn serialize(&self, serializer: S) -> Result { 140 | if serializer.is_human_readable() { 141 | serializer.serialize_str(&self.to_string()) 142 | } else { 143 | self.0.serialize(serializer) 144 | } 145 | } 146 | } 147 | 148 | #[cfg(feature = "serde-1")] 149 | struct AssetTypeIdVisitor; 150 | 151 | #[cfg(feature = "serde-1")] 152 | impl<'a> Visitor<'a> for AssetTypeIdVisitor { 153 | type Value = AssetTypeId; 154 | 155 | fn expecting(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { 156 | write!(fmt, "a UUID-formatted string") 157 | } 158 | 159 | fn visit_str(self, s: &str) -> Result { 160 | uuid::Uuid::parse_str(s) 161 | .map(|id| AssetTypeId(*id.as_bytes())) 162 | .map_err(|_| de::Error::invalid_value(de::Unexpected::Str(s), &self)) 163 | } 164 | } 165 | 166 | #[cfg(feature = "serde-1")] 167 | impl<'de> Deserialize<'de> for AssetTypeId { 168 | fn deserialize>(deserializer: D) -> Result { 169 | if deserializer.is_human_readable() { 
170 | deserializer.deserialize_string(AssetTypeIdVisitor) 171 | } else { 172 | Ok(AssetTypeId(<[u8; 16]>::deserialize(deserializer)?)) 173 | } 174 | } 175 | } 176 | 177 | /// A potentially unresolved reference to an asset 178 | #[derive(Debug, Hash, PartialEq, Eq, Clone, Ord, PartialOrd)] 179 | #[cfg_attr(feature = "serde-1", derive(serde::Serialize, serde::Deserialize))] 180 | pub enum AssetRef { 181 | Uuid(AssetUuid), 182 | Path(std::path::PathBuf), 183 | } 184 | impl AssetRef { 185 | pub fn expect_uuid(&self) -> &AssetUuid { 186 | if let AssetRef::Uuid(uuid) = self { 187 | uuid 188 | } else { 189 | panic!("Expected AssetRef::Uuid, got {:?}", self) 190 | } 191 | } 192 | 193 | pub fn is_path(&self) -> bool { 194 | matches!(self, AssetRef::Path(_)) 195 | } 196 | 197 | pub fn is_uuid(&self) -> bool { 198 | matches!(self, AssetRef::Uuid(_)) 199 | } 200 | } 201 | 202 | #[derive(Debug, Hash, PartialEq, Copy, Clone)] 203 | #[cfg_attr(feature = "serde-1", derive(serde::Serialize, serde::Deserialize))] 204 | pub enum CompressionType { 205 | None, 206 | Lz4, 207 | } 208 | 209 | impl Default for CompressionType { 210 | fn default() -> Self { 211 | Self::None 212 | } 213 | } 214 | 215 | /// Serializable metadata for an asset. 216 | /// Stored in .meta files and metadata DB. 
217 | #[derive(Debug, Clone, Hash, Default)] 218 | #[cfg_attr(feature = "serde-1", derive(Serialize, Deserialize))] 219 | pub struct AssetMetadata { 220 | /// UUID for the asset to uniquely identify it 221 | pub id: AssetUuid, 222 | /// Search tags are used by asset tooling to search for the imported asset 223 | pub search_tags: Vec<(String, Option)>, 224 | /// The referenced build pipeline is invoked when a build artifact is requested for the imported asset 225 | pub build_pipeline: Option, 226 | /// The latest artifact produced when importing this asset 227 | pub artifact: Option, 228 | } 229 | 230 | /// 64-bit hash of the inputs that would produce a given asset artifact 231 | #[derive(Debug, Copy, Clone, Hash, Default)] 232 | #[cfg_attr(feature = "serde-1", derive(Serialize, Deserialize))] 233 | #[cfg_attr(feature = "serde-1", serde(transparent))] 234 | pub struct ArtifactId(pub u64); 235 | 236 | /// Serializable metadata for an artifact. 237 | /// Stored in .meta files and metadata DB. 238 | #[derive(Debug, Clone, Hash, Default)] 239 | #[cfg_attr(feature = "serde-1", derive(Serialize, Deserialize))] 240 | pub struct ArtifactMetadata { 241 | /// Hash that identifies this artifact 242 | pub id: ArtifactId, 243 | /// UUID for this artifact's asset 244 | pub asset_id: AssetUuid, 245 | /// Build dependencies will be included in the Builder arguments when building an asset 246 | pub build_deps: Vec, 247 | /// Load dependencies are guaranteed to load before this asset by the Loader 248 | pub load_deps: Vec, 249 | /// Type of compression used to compress this artifact 250 | pub compression: CompressionType, 251 | /// Size of this artifact in bytes when compressed 252 | pub compressed_size: Option, 253 | /// Size of this artifact in bytes when serialized and uncompressed 254 | pub uncompressed_size: Option, 255 | /// The UUID of the artifact's Rust type 256 | pub type_id: AssetTypeId, 257 | } 258 | 259 | /// Provides a unique 16-byte ID for a value's type. 
260 | pub trait TypeUuidDynamic { 261 | fn uuid(&self) -> [u8; 16]; 262 | } 263 | 264 | #[cfg(feature = "type_uuid")] 265 | impl TypeUuidDynamic for T { 266 | fn uuid(&self) -> [u8; 16] { 267 | ::uuid(self) 268 | } 269 | } 270 | 271 | #[cfg(feature = "type_uuid")] 272 | pub use type_uuid; 273 | -------------------------------------------------------------------------------- /core/src/utils.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | ffi::OsStr, 3 | hash::{Hash, Hasher}, 4 | path::{Path, PathBuf}, 5 | }; 6 | 7 | use crate::{AssetTypeId, AssetUuid}; 8 | 9 | pub fn make_array(slice: &[T]) -> A 10 | where 11 | A: Sized + Default + AsMut<[T]>, 12 | T: Copy, 13 | { 14 | let mut a = Default::default(); 15 | >::as_mut(&mut a).copy_from_slice(slice); 16 | a 17 | } 18 | 19 | pub fn type_from_slice(slice: &[u8]) -> Option { 20 | uuid_from_slice(slice).map(|uuid| AssetTypeId(uuid.0)) 21 | } 22 | 23 | pub fn uuid_from_slice(slice: &[u8]) -> Option { 24 | const BYTES_LEN: usize = 16; 25 | 26 | let len = slice.len(); 27 | 28 | if len != BYTES_LEN { 29 | return None; 30 | } 31 | 32 | let mut bytes: uuid::Bytes = [0; 16]; 33 | bytes.copy_from_slice(slice); 34 | Some(AssetUuid(bytes)) 35 | } 36 | 37 | pub fn to_meta_path(p: &Path) -> PathBuf { 38 | p.with_file_name(OsStr::new( 39 | &(p.file_name().unwrap().to_str().unwrap().to_owned() + ".meta"), 40 | )) 41 | } 42 | 43 | pub fn calc_import_artifact_hash(id: &AssetUuid, import_hash: u64, dep_list: T) -> u64 44 | where 45 | V: std::borrow::Borrow, 46 | T: IntoIterator, 47 | { 48 | let mut hasher = ::std::collections::hash_map::DefaultHasher::new(); 49 | import_hash.hash(&mut hasher); 50 | (*id).hash(&mut hasher); 51 | let mut deps: Vec<_> = dep_list.into_iter().collect(); 52 | deps.sort_by_key(|dep| *dep.borrow()); 53 | deps.dedup_by_key(|dep| *dep.borrow()); 54 | for dep in &deps { 55 | dep.borrow().hash(&mut hasher); 56 | } 57 | hasher.finish() 58 | } 59 | 60 | 
#[cfg(feature = "path_utils")] 61 | pub fn canonicalize_path(path: &Path) -> PathBuf { 62 | use path_slash::{PathBufExt, PathExt}; 63 | let cleaned_path = PathBuf::from_slash(path_clean::clean(&path.to_slash_lossy())); 64 | PathBuf::from(dunce::simplified(&cleaned_path)) 65 | } 66 | -------------------------------------------------------------------------------- /core/tests/uuid.rs: -------------------------------------------------------------------------------- 1 | extern crate bincode; 2 | extern crate distill_core; 3 | extern crate serde_json; 4 | 5 | #[test] 6 | fn serialize_asset_uuid_string() { 7 | let uuid = distill_core::AssetUuid([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); 8 | 9 | let result = serde_json::to_string(&uuid).unwrap(); 10 | 11 | assert_eq!( 12 | "\"01020304-0506-0708-090a-0b0c0d0e0f10\"".to_string(), 13 | result 14 | ); 15 | } 16 | 17 | #[test] 18 | fn serialize_asset_uuid_binary() { 19 | let data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; 20 | let uuid = distill_core::AssetUuid(data); 21 | 22 | let result: Vec = bincode::serialize(&uuid).unwrap(); 23 | 24 | assert_eq!(data.to_vec(), result); 25 | } 26 | 27 | #[test] 28 | fn deserialize_asset_uuid_string() { 29 | let string = "\"01020304-0506-0708-090a-0b0c0d0e0f10\""; 30 | 31 | let result: distill_core::AssetUuid = serde_json::from_str(string).unwrap(); 32 | 33 | let expected = distill_core::AssetUuid([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); 34 | 35 | assert_eq!(expected, result); 36 | } 37 | 38 | #[test] 39 | fn deserialize_asset_uuid_binary() { 40 | let data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; 41 | 42 | let result: distill_core::AssetUuid = bincode::deserialize(&data).unwrap(); 43 | 44 | assert_eq!(distill_core::AssetUuid(data), result); 45 | } 46 | 47 | #[test] 48 | fn serialize_type_uuid_string() { 49 | let uuid = distill_core::AssetTypeId([3, 1, 4, 1, 5, 9, 2, 6, 5, 3, 5, 8, 9, 7, 9, 3]); 50 | 51 | let result = 
serde_json::to_string(&uuid).unwrap(); 52 | 53 | assert_eq!( 54 | "\"03010401-0509-0206-0503-050809070903\"".to_string(), 55 | result 56 | ); 57 | } 58 | 59 | #[test] 60 | fn serialize_type_uuid_binary() { 61 | let data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; 62 | let uuid = distill_core::AssetTypeId(data); 63 | 64 | let result: Vec = bincode::serialize(&uuid).unwrap(); 65 | 66 | assert_eq!(data.to_vec(), result); 67 | } 68 | 69 | #[test] 70 | fn deserialize_type_uuid_string() { 71 | let string = "\"03010401-0509-0206-0503-050809070903\""; 72 | 73 | let result: distill_core::AssetTypeId = serde_json::from_str(string).unwrap(); 74 | 75 | let expected = distill_core::AssetTypeId([3, 1, 4, 1, 5, 9, 2, 6, 5, 3, 5, 8, 9, 7, 9, 3]); 76 | 77 | assert_eq!(expected, result); 78 | } 79 | 80 | #[test] 81 | fn deserialize_type_uuid_binary() { 82 | let data = [3, 1, 4, 1, 5, 9, 2, 6, 5, 3, 5, 8, 9, 7, 9, 3]; 83 | 84 | let result: distill_core::AssetTypeId = bincode::deserialize(&data).unwrap(); 85 | 86 | assert_eq!(distill_core::AssetTypeId(data), result); 87 | } 88 | -------------------------------------------------------------------------------- /daemon/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "distill-daemon" 3 | version = "0.0.3" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | license = "MIT OR Apache-2.0" 7 | description = "Daemon component of the asset pipeline `distill`." 
8 | 9 | [dependencies] 10 | distill-core = { path = "../core", version = "=0.0.3", features = ["path_utils"] } 11 | distill-schema = { path = "../schema", version = "=0.0.3" } 12 | distill-importer = { path = "../importer", version = "=0.0.3" } 13 | distill-loader = { path = "../loader", version = "=0.0.3" } 14 | dunce = "1.0" 15 | path-clean = "0.1" 16 | path-slash = "0.1.1" 17 | capnp = { version = "0.14.0", features = ["unaligned"] } 18 | capnp-rpc = "0.14.0" 19 | notify = "4.0.15" 20 | distill-downstream-lmdb-rkv = "0.11.0-windows-fix" 21 | rayon = { version = "1.3", optional = true } 22 | log = { version = "0.4", features = ["serde", "std"] } 23 | futures = { version = "0.3", default-features = false, features = ["std", "async-await"] } 24 | event-listener = { version = "2.4.0" } 25 | serde = "1" 26 | serde_derive = "1.0" 27 | erased-serde = "0.3" 28 | bincode = "1.3.1" 29 | ron = "0.6.4" 30 | num_cpus = "1.10" 31 | fern = { version = "0.6.0", optional = true } 32 | chrono = { version = "0.4.19", default-features = false, features = ["clock"], optional = true } 33 | pin-project = "1.0" 34 | uuid = { version = "0.8.2", features = ["v4"] } 35 | async-io = "1.4.1" 36 | async-executor = "1.4.1" 37 | async-net = "1.6.0" 38 | async-fs = "1.5.0" 39 | async-lock = { version = "2.1" } 40 | async-channel = { version = "1.4" } 41 | bevy_tasks = "0.5.0" 42 | 43 | async-tungstenite = { version = "0.15", optional = true } 44 | 45 | [dev-dependencies] 46 | tempfile = "3.2.0" 47 | futures-test = "0.3.15" 48 | # used for timeout, which was vendored from async_std. 
It is only used in tests 49 | pin-project-lite = "0.2.6" 50 | 51 | [features] 52 | parallel_hash = ["rayon"] 53 | pretty_log = ["chrono", "fern"] 54 | ws = ["async-tungstenite"] 55 | -------------------------------------------------------------------------------- /daemon/src/artifact_cache.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use distill_importer::SerializedAsset; 4 | use distill_schema::{build_artifact_metadata, data::artifact}; 5 | 6 | use crate::{ 7 | capnp_db::{DBTransaction, Environment, MessageReader, RoTransaction, RwTransaction}, 8 | error::Result, 9 | }; 10 | 11 | pub struct ArtifactCache { 12 | db: Arc, 13 | tables: ArtifactCacheTables, 14 | } 15 | 16 | struct ArtifactCacheTables { 17 | /// Maps a hash to the serialized artifact data 18 | /// u64 -> Artifact 19 | hash_to_artifact: lmdb::Database, 20 | } 21 | 22 | impl ArtifactCache { 23 | pub fn new(db: &Arc) -> Result { 24 | Ok(ArtifactCache { 25 | db: db.clone(), 26 | tables: ArtifactCacheTables { 27 | hash_to_artifact: db.create_db( 28 | Some("ArtifactCache::hash_to_artifact"), 29 | lmdb::DatabaseFlags::INTEGER_KEY, 30 | )?, 31 | }, 32 | }) 33 | } 34 | 35 | // TODO: invalidate cache 36 | #[allow(dead_code)] 37 | pub async fn delete(&self, hash: u64) -> Result { 38 | let mut txn = self.db.rw_txn().await?; 39 | Ok(txn 40 | .delete(self.tables.hash_to_artifact, &hash.to_le_bytes()) 41 | .expect("db: Failed to delete entry from hash_to_artifact table")) 42 | } 43 | 44 | pub fn insert>( 45 | &self, 46 | txn: &mut RwTransaction<'_>, 47 | artifact: &SerializedAsset, 48 | ) { 49 | txn.put( 50 | self.tables.hash_to_artifact, 51 | &artifact.metadata.id.0.to_le_bytes(), 52 | &build_artifact_message(artifact), 53 | ) 54 | .expect("lmdb: failed to put path ref"); 55 | } 56 | 57 | pub async fn ro_txn(&self) -> Result> { 58 | self.db.ro_txn().await 59 | } 60 | 61 | pub async fn rw_txn(&self) -> Result> { 62 | self.db.rw_txn().await 63 | } 
64 | 65 | pub async fn get<'a, V: DBTransaction<'a, T>, T: lmdb::Transaction + 'a>( 66 | &self, 67 | txn: &'a V, 68 | hash: u64, 69 | ) -> Option> { 70 | txn.get::(self.tables.hash_to_artifact, &hash.to_le_bytes()) 71 | .expect("db: Failed to get entry from hash_to_artifact table") 72 | } 73 | 74 | // pub fn get_or_insert_with<'a, T: AsRef<[u8]>>( 75 | // &self, 76 | // txn: &'a mut RwTransaction, 77 | // inserter: impl FnOnce() -> SerializedAsset, 78 | // ) -> artifact::Reader<'a> { 79 | // match self.get(txn) { 80 | // Some(r) => r, 81 | // None => { 82 | // self.insert(txn, &inserter()); 83 | // self.get(txn).expect("Inserted in same transaction") 84 | // } 85 | // } 86 | // } 87 | } 88 | 89 | pub(crate) fn build_artifact_message>( 90 | artifact: &SerializedAsset, 91 | ) -> capnp::message::Builder { 92 | let mut value_builder = capnp::message::Builder::new_default(); 93 | { 94 | let mut m = value_builder.init_root::>(); 95 | let mut metadata = m.reborrow().init_metadata(); 96 | build_artifact_metadata(&artifact.metadata, &mut metadata); 97 | let slice: &[u8] = artifact.data.as_ref(); 98 | m.reborrow().set_data(slice); 99 | } 100 | value_builder 101 | } 102 | -------------------------------------------------------------------------------- /daemon/src/error.rs: -------------------------------------------------------------------------------- 1 | use std::{fmt, io, path::PathBuf, str}; 2 | 3 | #[derive(Debug)] 4 | pub enum Error { 5 | Notify(notify::Error), 6 | IO(io::Error), 7 | #[cfg(feature = "ws")] 8 | Websocket(async_tungstenite::tungstenite::Error), 9 | RescanRequired, 10 | Lmdb(lmdb::Error), 11 | Capnp(capnp::Error), 12 | NotInSchema(capnp::NotInSchema), 13 | BincodeError(bincode::ErrorKind), 14 | RonError(ron::Error), 15 | ErasedSerde(erased_serde::Error), 16 | MetaDeError(PathBuf, ron::Error), 17 | SetLoggerError(log::SetLoggerError), 18 | UuidLength, 19 | RecvError, 20 | SendError, 21 | Exit, 22 | ImporterError(distill_importer::Error), 23 | 
StrUtf8Error(str::Utf8Error), 24 | Custom(String), 25 | } 26 | 27 | pub type Result = std::result::Result; 28 | 29 | impl std::error::Error for Error { 30 | fn cause(&self) -> Option<&dyn std::error::Error> { 31 | match *self { 32 | Error::Notify(ref e) => Some(e), 33 | Error::IO(ref e) => Some(e), 34 | #[cfg(feature = "ws")] 35 | Error::Websocket(ref e) => Some(e), 36 | Error::RescanRequired => None, 37 | Error::Lmdb(ref e) => Some(e), 38 | Error::Capnp(ref e) => Some(e), 39 | Error::NotInSchema(ref e) => Some(e), 40 | Error::BincodeError(ref e) => Some(e), 41 | Error::ErasedSerde(ref e) => Some(e), 42 | Error::RonError(ref e) => Some(e), 43 | Error::MetaDeError(_, ref e) => Some(e), 44 | Error::SetLoggerError(ref e) => Some(e), 45 | Error::UuidLength => None, 46 | Error::RecvError => None, 47 | Error::SendError => None, 48 | Error::Exit => None, 49 | Error::ImporterError(ref e) => Some(e), 50 | Error::StrUtf8Error(ref e) => Some(e), 51 | Error::Custom(ref _e) => None, 52 | } 53 | } 54 | } 55 | impl fmt::Display for Error { 56 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 57 | match *self { 58 | Error::Notify(ref e) => e.fmt(f), 59 | Error::IO(ref e) => e.fmt(f), 60 | #[cfg(feature = "ws")] 61 | Error::Websocket(ref e) => e.fmt(f), 62 | Error::RescanRequired => write!(f, "{}", self), 63 | Error::Lmdb(ref e) => e.fmt(f), 64 | Error::Capnp(ref e) => e.fmt(f), 65 | Error::NotInSchema(ref e) => e.fmt(f), 66 | Error::BincodeError(ref e) => e.fmt(f), 67 | Error::ErasedSerde(ref e) => e.fmt(f), 68 | Error::RonError(ref e) => e.fmt(f), 69 | Error::MetaDeError(ref path, ref e) => { 70 | write!(f, "metadata {} ", path.display())?; 71 | e.fmt(f) 72 | } 73 | Error::SetLoggerError(ref e) => e.fmt(f), 74 | Error::UuidLength => write!(f, "{}", self), 75 | Error::RecvError => write!(f, "{}", self), 76 | Error::SendError => write!(f, "{}", self), 77 | Error::Exit => write!(f, "{}", self), 78 | Error::ImporterError(ref e) => e.fmt(f), 79 | Error::StrUtf8Error(ref e) 
=> e.fmt(f), 80 | Error::Custom(ref s) => f.write_str(s.as_str()), 81 | } 82 | } 83 | } 84 | impl From for Error { 85 | fn from(err: notify::Error) -> Error { 86 | Error::Notify(err) 87 | } 88 | } 89 | impl From for Error { 90 | fn from(err: io::Error) -> Error { 91 | Error::IO(err) 92 | } 93 | } 94 | #[cfg(feature = "ws")] 95 | impl From for Error { 96 | fn from(err: async_tungstenite::tungstenite::Error) -> Error { 97 | Error::Websocket(err) 98 | } 99 | } 100 | impl From for Error { 101 | fn from(err: lmdb::Error) -> Error { 102 | Error::Lmdb(err) 103 | } 104 | } 105 | impl From for Error { 106 | fn from(err: capnp::Error) -> Error { 107 | Error::Capnp(err) 108 | } 109 | } 110 | impl From for Error { 111 | fn from(err: capnp::NotInSchema) -> Error { 112 | Error::NotInSchema(err) 113 | } 114 | } 115 | impl From> for Error { 116 | fn from(err: Box) -> Error { 117 | Error::BincodeError(*err) 118 | } 119 | } 120 | impl From for Error { 121 | fn from(err: ron::Error) -> Error { 122 | Error::RonError(err) 123 | } 124 | } 125 | 126 | impl From for Error { 127 | fn from(err: erased_serde::Error) -> Error { 128 | Error::ErasedSerde(err) 129 | } 130 | } 131 | impl From for capnp::Error { 132 | fn from(err: Error) -> capnp::Error { 133 | capnp::Error::failed(format!("{}", err)) 134 | } 135 | } 136 | impl From for Error { 137 | fn from(err: log::SetLoggerError) -> Error { 138 | Error::SetLoggerError(err) 139 | } 140 | } 141 | impl From for Error { 142 | fn from(err: distill_importer::Error) -> Error { 143 | Error::ImporterError(err) 144 | } 145 | } 146 | impl From for Error { 147 | fn from(err: str::Utf8Error) -> Error { 148 | Error::StrUtf8Error(err) 149 | } 150 | } 151 | -------------------------------------------------------------------------------- /daemon/src/extension_map.rs: -------------------------------------------------------------------------------- 1 | use std::{borrow::Borrow, collections::HashMap, hash::Hash, path::Path}; 2 | 3 | struct SequenceTrie { 4 | 
value: Option, 5 | children: HashMap>, 6 | } 7 | impl Default for SequenceTrie { 8 | fn default() -> Self { 9 | Self { 10 | value: Default::default(), 11 | children: Default::default(), 12 | } 13 | } 14 | } 15 | impl SequenceTrie { 16 | fn insert(&mut self, key: impl Iterator, value: V) -> Option { 17 | let node = key.fold(self, |node, k| { 18 | node.children.entry(k).or_insert_with(SequenceTrie::default) 19 | }); 20 | std::mem::replace(&mut node.value, Some(value)) 21 | } 22 | 23 | fn get<'a, Q: ?Sized, I>(&self, key: I) -> Option<&V> 24 | where 25 | K: Borrow, 26 | Q: Hash + Eq + 'a, 27 | I: Iterator, 28 | { 29 | let mut current = self; 30 | for fragment in key { 31 | match current.children.get(fragment.borrow()) { 32 | Some(node) => current = node, 33 | None => return None, 34 | } 35 | } 36 | current.value.as_ref() 37 | } 38 | } 39 | 40 | pub struct ExtensionMap { 41 | map: SequenceTrie, 42 | values: Vec, 43 | } 44 | impl Default for ExtensionMap { 45 | fn default() -> Self { 46 | Self { 47 | map: Default::default(), 48 | values: Default::default(), 49 | } 50 | } 51 | } 52 | 53 | impl ExtensionMap { 54 | pub fn insert(&mut self, extensions: &[&str], value: T) { 55 | let idx = self.values.len(); 56 | self.values.push(value); 57 | 58 | extensions 59 | .iter() 60 | .for_each(|extension| self.insert_inner(extension.as_ref(), idx)); 61 | } 62 | 63 | fn insert_inner(&mut self, extension: &str, idx: usize) { 64 | let key = extension.rsplit('.').map(|e| e.to_lowercase()); 65 | let already_in = self.map.insert(key, idx).is_some(); 66 | if already_in { 67 | panic!("extension '{}' already present", extension); 68 | } 69 | } 70 | 71 | pub fn get(&self, path: &Path) -> Option<&T> { 72 | let path = path.to_str().expect("non-utf8 path").to_lowercase(); 73 | let mut extension = path.split('.'); 74 | 75 | let idx = loop { 76 | match self.map.get(extension.clone().rev()) { 77 | Some(i) => break *i, 78 | None => { 79 | extension.next()?; 80 | } 81 | } 82 | }; 83 | 84 | 
Some(&self.values[idx]) 85 | } 86 | } 87 | #[cfg(test)] 88 | mod tests { 89 | use std::path::Path; 90 | 91 | use crate::extension_map::ExtensionMap; 92 | 93 | #[test] 94 | fn single() { 95 | let mut map = ExtensionMap::default(); 96 | map.insert(&["txt"], "text"); 97 | 98 | assert_eq!(map.get(Path::new("no")), None); 99 | assert_eq!(map.get(Path::new("test.txt")), Some(&"text")); 100 | assert_eq!(map.get(Path::new("test.my.txt")), Some(&"text")); 101 | } 102 | 103 | #[test] 104 | fn specificity() { 105 | let mut map = ExtensionMap::default(); 106 | map.insert(&["ron"], "RON"); 107 | map.insert(&["scn.ron", "scn"], "SCENE"); 108 | map.insert(&["this.scn.ron"], "THIS"); 109 | 110 | assert_eq!(map.get(Path::new("file.ron")), Some(&"RON")); 111 | assert_eq!(map.get(Path::new("file.scn")), Some(&"SCENE")); 112 | assert_eq!(map.get(Path::new("file.scn.ron")), Some(&"SCENE")); 113 | assert_eq!(map.get(Path::new("this.scn.ron")), Some(&"THIS")); 114 | } 115 | 116 | #[test] 117 | fn alias() { 118 | let mut map = ExtensionMap::default(); 119 | map.insert(&["this", "that"], ()); 120 | 121 | assert_eq!(map.get(Path::new("a.this")), Some(&())); 122 | assert_eq!(map.get(Path::new("a.that")), Some(&())); 123 | } 124 | 125 | #[test] 126 | fn case_insensitive() { 127 | let mut map = ExtensionMap::default(); 128 | map.insert(&["eXteNsIon"], ()); 129 | 130 | assert_eq!(map.get(Path::new("a.extension")), Some(&())); 131 | assert_eq!(map.get(Path::new("a.EXTENSION")), Some(&())); 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /daemon/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(unknown_lints)] 2 | #![deny( 3 | rust_2018_compatibility, 4 | rust_2018_idioms, 5 | unused, 6 | unused_extern_crates, 7 | future_incompatible, 8 | nonstandard_style 9 | )] 10 | #![warn(clippy::all)] 11 | #![allow(clippy::rc_buffer)] // https://github.com/rust-lang/rust-clippy/issues/6170 12 | 13 | 
mod artifact_cache; 14 | mod asset_hub; 15 | mod asset_hub_service; 16 | mod capnp_db; 17 | mod daemon; 18 | mod error; 19 | mod extension_map; 20 | mod file_asset_source; 21 | mod file_tracker; 22 | mod serialized_asset; 23 | mod source_pair_import; 24 | mod watcher; 25 | #[cfg(feature = "ws")] 26 | mod websocket_async_io; 27 | 28 | // This module is only used from test code 29 | #[cfg(test)] 30 | mod timeout; 31 | 32 | pub use crate::{ 33 | daemon::{default_importer_contexts, default_importers, AssetDaemon, ImporterMap}, 34 | error::{Error, Result}, 35 | }; 36 | 37 | #[cfg(debug_assertions)] 38 | const DEFAULT_LOGGING_LEVEL: log::LevelFilter = log::LevelFilter::Debug; 39 | #[cfg(not(debug_assertions))] 40 | const DEFAULT_LOGGING_LEVEL: log::LevelFilter = log::LevelFilter::Info; 41 | 42 | mod simple_logger { 43 | use log::{Level, Metadata, Record}; 44 | 45 | pub struct SimpleLogger; 46 | 47 | impl log::Log for SimpleLogger { 48 | fn enabled(&self, metadata: &Metadata<'_>) -> bool { 49 | metadata.level() <= Level::Info 50 | } 51 | 52 | fn log(&self, record: &Record<'_>) { 53 | if self.enabled(record.metadata()) { 54 | println!("{} - {}", record.level(), record.args()); 55 | } 56 | } 57 | 58 | fn flush(&self) {} 59 | } 60 | } 61 | #[cfg(not(feature = "pretty_log"))] 62 | static LOGGER: simple_logger::SimpleLogger = simple_logger::SimpleLogger; 63 | 64 | #[cfg(not(feature = "pretty_log"))] 65 | pub fn init_logging() -> Result<()> { 66 | let rust_log = std::env::var("RUST_LOG").unwrap_or_else(|_| "".to_string()); 67 | let log_level = ::from_str(&rust_log) 68 | .unwrap_or(DEFAULT_LOGGING_LEVEL); 69 | log::set_logger(&LOGGER) 70 | .map(|()| log::set_max_level(log_level)) 71 | .map_err(Error::SetLoggerError) 72 | } 73 | #[cfg(feature = "pretty_log")] 74 | pub fn init_logging() -> Result<()> { 75 | use chrono::Local; 76 | let rust_log = std::env::var("RUST_LOG").unwrap_or_else(|_| "".to_string()); 77 | let log_level = ::from_str(&rust_log) 78 | 
.unwrap_or(DEFAULT_LOGGING_LEVEL); 79 | fern::Dispatch::new() 80 | .format(|out, message, record| { 81 | out.finish(format_args!( 82 | "[{timestamp}][{level}][{target}] {message}", 83 | level = record.level(), 84 | timestamp = Local::now().format("%Y-%m-%dT%H:%M:%S%.3f"), 85 | target = record.target(), 86 | message = message, 87 | )) 88 | }) 89 | .chain(std::io::stdout()) 90 | .level(log_level) 91 | // .chain(fern::log_file("output.log")?) 92 | .apply()?; 93 | Ok(()) 94 | } 95 | -------------------------------------------------------------------------------- /daemon/src/serialized_asset.rs: -------------------------------------------------------------------------------- 1 | use distill_core::{ArtifactId, AssetRef, AssetTypeId, AssetUuid, CompressionType}; 2 | use distill_importer::{ArtifactMetadata, SerdeObj, SerializedAsset}; 3 | 4 | use crate::Result; 5 | 6 | pub fn create( 7 | hash: u64, 8 | id: AssetUuid, 9 | build_deps: Vec, 10 | load_deps: Vec, 11 | value: &dyn SerdeObj, 12 | compression: CompressionType, 13 | scratch_buf: &mut Vec, 14 | ) -> Result>> { 15 | let size = bincode::serialized_size(value)? 
as usize; 16 | scratch_buf.clear(); 17 | scratch_buf.resize(size, 0); 18 | bincode::serialize_into(scratch_buf.as_mut_slice(), value)?; 19 | let asset_buf = { 20 | match compression { 21 | CompressionType::None => scratch_buf.clone(), 22 | CompressionType::Lz4 => unimplemented!(), 23 | } 24 | }; 25 | 26 | Ok(SerializedAsset { 27 | metadata: ArtifactMetadata { 28 | id: ArtifactId(hash), 29 | asset_id: id, 30 | build_deps, 31 | load_deps, 32 | compression, 33 | uncompressed_size: Some(size as u64), 34 | compressed_size: Some(asset_buf.len() as u64), 35 | type_id: AssetTypeId(value.uuid()), 36 | }, 37 | data: asset_buf, 38 | }) 39 | } 40 | -------------------------------------------------------------------------------- /daemon/src/timeout.rs: -------------------------------------------------------------------------------- 1 | //NOTE: This is vendored from async_std under Apache 2.0/MIT license. (see EDIT comments below 2 | // for changes) 3 | 4 | use std::error::Error; 5 | use std::fmt; 6 | use std::future::Future; 7 | use std::pin::Pin; 8 | use std::time::Duration; 9 | 10 | use pin_project_lite::pin_project; 11 | 12 | //EDIT use std directly 13 | //use crate::task::{Context, Poll}; 14 | use std::task::{Context, Poll}; 15 | 16 | //EDIT: Use async_io directly 17 | //use crate::utils::{timer_after, Timer}; 18 | use async_io::Timer; 19 | 20 | /// Awaits a future or times out after a duration of time. 21 | /// 22 | /// If you want to await an I/O future consider using 23 | /// [`io::timeout`](../io/fn.timeout.html) instead. 
24 | /// 25 | /// # Examples 26 | /// 27 | /// ``` 28 | /// # fn main() -> std::io::Result<()> { async_std::task::block_on(async { 29 | /// # 30 | /// use std::time::Duration; 31 | /// 32 | /// use async_std::future; 33 | /// 34 | /// let never = future::pending::<()>(); 35 | /// let dur = Duration::from_millis(5); 36 | /// assert!(future::timeout(dur, never).await.is_err()); 37 | /// # 38 | /// # Ok(()) }) } 39 | /// ``` 40 | // EDIT: Allow dead code (macOS does not run the tests that require this) 41 | #[allow(dead_code)] 42 | pub async fn timeout(dur: Duration, f: F) -> Result 43 | where 44 | F: Future, 45 | { 46 | TimeoutFuture::new(f, dur).await 47 | } 48 | 49 | pin_project! { 50 | /// A future that times out after a duration of time. 51 | pub struct TimeoutFuture { 52 | #[pin] 53 | future: F, 54 | #[pin] 55 | delay: Timer, 56 | } 57 | } 58 | 59 | impl TimeoutFuture { 60 | #[allow(dead_code)] 61 | pub(super) fn new(future: F, dur: Duration) -> TimeoutFuture { 62 | TimeoutFuture { 63 | future, 64 | //EDIT: Call timer directly 65 | //delay: timer_after(dur), 66 | delay: Timer::after(dur), 67 | } 68 | } 69 | } 70 | 71 | impl Future for TimeoutFuture { 72 | type Output = Result; 73 | 74 | fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { 75 | let this = self.project(); 76 | match this.future.poll(cx) { 77 | Poll::Ready(v) => Poll::Ready(Ok(v)), 78 | Poll::Pending => match this.delay.poll(cx) { 79 | Poll::Ready(_) => Poll::Ready(Err(TimeoutError { _private: () })), 80 | Poll::Pending => Poll::Pending, 81 | }, 82 | } 83 | } 84 | } 85 | 86 | /// An error returned when a future times out. 
87 | #[derive(Clone, Copy, Debug, Eq, PartialEq)] 88 | pub struct TimeoutError { 89 | _private: (), 90 | } 91 | 92 | impl Error for TimeoutError {} 93 | 94 | impl fmt::Display for TimeoutError { 95 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 96 | "future has timed out".fmt(f) 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /daemon/src/websocket_async_io.rs: -------------------------------------------------------------------------------- 1 | //! `AsyncRead` and `AsyncWrite` implementations on top of `tungestenite`-websockets 2 | 3 | use std::io; 4 | 5 | use async_tungstenite::tungstenite::{Error, Message}; 6 | use futures::{future, Sink}; 7 | use futures::{AsyncRead, AsyncWrite, SinkExt, StreamExt, TryStreamExt}; 8 | use std::{pin::Pin, task::Poll}; 9 | 10 | pub async fn accept_websocket_stream( 11 | stream: S, 12 | ) -> Result<(impl AsyncRead, impl AsyncWrite), Error> 13 | where 14 | S: AsyncRead + AsyncWrite + Unpin, 15 | { 16 | let ws_stream = async_tungstenite::accept_async(stream).await?; 17 | 18 | let (sink, stream) = ws_stream.split(); 19 | 20 | let stream = stream 21 | .and_then(|message| async { 22 | match message { 23 | Message::Binary(bytes) => Ok(bytes), 24 | Message::Close(_) => Err(Error::ConnectionClosed), 25 | other => Err(Error::Io(io::Error::new( 26 | io::ErrorKind::Other, 27 | format!( 28 | "tungstenite-async-io can only handle binary messages, got {:?}", 29 | other 30 | ), 31 | ))), 32 | } 33 | }) 34 | .take_while(|res| match res { 35 | Err(Error::ConnectionClosed) => future::ready(false), 36 | _ => future::ready(true), 37 | }) 38 | .map_err(|e| io::Error::new(io::ErrorKind::Other, e)); 39 | 40 | let stream = Box::pin(stream); 41 | let async_read = stream.into_async_read(); 42 | 43 | let sink = sink.with(|data: Vec| async { Ok::<_, Error>(Message::Binary(data)) }); 44 | let sink = Box::pin(sink); 45 | let async_write = IntoAsyncWrite::new(sink); 46 | 47 | Ok((async_read, 
async_write)) 48 | } 49 | 50 | struct IntoAsyncWrite> + Unpin> { 51 | sink: S, 52 | buffer: Vec, 53 | } 54 | 55 | impl> + Unpin> IntoAsyncWrite { 56 | pub fn new(sink: S) -> Self { 57 | IntoAsyncWrite { 58 | sink, 59 | buffer: Vec::new(), 60 | } 61 | } 62 | } 63 | 64 | impl AsyncWrite for IntoAsyncWrite 65 | where 66 | S: Sink> + Unpin, 67 | S::Error: std::error::Error + Send + Sync + 'static, 68 | { 69 | fn poll_write( 70 | self: std::pin::Pin<&mut Self>, 71 | _: &mut std::task::Context<'_>, 72 | buf: &[u8], 73 | ) -> std::task::Poll> { 74 | self.get_mut().buffer.extend_from_slice(buf); 75 | Poll::Ready(Ok(buf.len())) 76 | } 77 | 78 | fn poll_flush( 79 | mut self: std::pin::Pin<&mut Self>, 80 | cx: &mut std::task::Context<'_>, 81 | ) -> std::task::Poll> { 82 | let map_err = |e| io::Error::new(io::ErrorKind::Other, e); 83 | 84 | let sink = Pin::new(&mut self.sink); 85 | match sink.poll_ready(cx) { 86 | Poll::Ready(Ok(())) => {} 87 | Poll::Ready(Err(e)) => return Poll::Ready(Err(map_err(e))), 88 | Poll::Pending => return Poll::Pending, 89 | } 90 | 91 | let buffer = std::mem::take(&mut self.buffer); 92 | let sink = Pin::new(&mut self.sink); 93 | sink.start_send(buffer).map_err(map_err)?; 94 | 95 | let sink = Pin::new(&mut self.sink); 96 | sink.poll_flush(cx).map_err(map_err) 97 | } 98 | 99 | fn poll_close( 100 | mut self: std::pin::Pin<&mut Self>, 101 | cx: &mut std::task::Context<'_>, 102 | ) -> std::task::Poll> { 103 | let map_err = |e| io::Error::new(io::ErrorKind::Other, e); 104 | 105 | Pin::new(&mut self.sink).poll_close(cx).map_err(map_err) 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /daemon/tests/file_tracker/test.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amethyst/distill/852e48272ab3dad76c7490fce8df52461f94ebc0/daemon/tests/file_tracker/test.txt -------------------------------------------------------------------------------- 
/deny.toml: -------------------------------------------------------------------------------- 1 | [advisories] 2 | # Reduced from deny to warn, these fail our CI and are generally not 3 | # actionable within this project. 4 | vulnerability = "warn" 5 | 6 | [licenses] 7 | allow = ["MIT", "Apache-2.0"] 8 | copyleft = "deny" 9 | confidence-threshold = 1.0 10 | 11 | exceptions = [ 12 | { allow = ["ISC"], name = "inotify-sys", version = "*" }, 13 | { allow = ["ISC"], name = "inotify", version = "*" }, 14 | { allow = ["CC0-1.0"], name = "notify", version = "*" }, 15 | { allow = ["BSD-3-Clause"], name = "fuchsia-zircon", version = "*" }, 16 | { allow = ["BSD-3-Clause"], name = "fuchsia-zircon-sys", version = "*" }, 17 | { allow = ["BSD-3-Clause"], name = "instant", version = "*" }, 18 | { allow = ["CC0-1.0"], name = "dunce", version = "*" }, 19 | ] 20 | 21 | [bans] 22 | multiple-versions = "deny" 23 | wildcards = "deny" 24 | 25 | skip = [ 26 | { name = "mio", version = "0.6.23" }, # upgrade notify 27 | { name = "miow", version = "0.2.2" }, # upgrade notify 28 | { name = "cfg-if", version = "0.1.10" }, 29 | { name = "winapi", version = "0.2.8" }, # upgrade notify 30 | { name = "redox_syscall", version = "0.1.57" }, # old version in notify 31 | ] 32 | 33 | [sources] 34 | unknown-git = "deny" 35 | -------------------------------------------------------------------------------- /docs/graphics/build.dot: -------------------------------------------------------------------------------- 1 | digraph import { 2 | intermediate [shape=record,label="Asset Intermediate Format"]; 3 | pipeline [label="Processing Pipeline"]; 4 | build_artifact [shape=record, label="{Asset Build Artifact|{Engine TypeUUID}|Data}"]; 5 | pack_file [shape=record, label="{Packfile}"]; 6 | build_cache [label="Build Cache"]; 7 | engine [label="Engine"]; 8 | packing [label="Packing"]; 9 | asset_hub_api [label="Asset Hub RPC"]; 10 | 11 | 12 | intermediate -> pipeline -> build_artifact 13 | build_artifact -> packing 
-> pack_file -> engine; 14 | build_artifact -> build_cache -> asset_hub_api -> engine; 15 | } -------------------------------------------------------------------------------- /docs/graphics/build.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | import 11 | 12 | 13 | intermediate 14 | 15 | Asset Intermediate Format 16 | 17 | 18 | pipeline 19 | 20 | Processing Pipeline 21 | 22 | 23 | intermediate->pipeline 24 | 25 | 26 | 27 | 28 | build_artifact 29 | 30 | Asset Build Artifact 31 | 32 | Engine TypeUUID 33 | 34 | Data 35 | 36 | 37 | pipeline->build_artifact 38 | 39 | 40 | 41 | 42 | build_cache 43 | 44 | Build Cache 45 | 46 | 47 | build_artifact->build_cache 48 | 49 | 50 | 51 | 52 | packing 53 | 54 | Packing 55 | 56 | 57 | build_artifact->packing 58 | 59 | 60 | 61 | 62 | pack_file 63 | 64 | Packfile 65 | 66 | 67 | engine 68 | 69 | Engine 70 | 71 | 72 | pack_file->engine 73 | 74 | 75 | 76 | 77 | asset_hub_api 78 | 79 | Asset Hub RPC 80 | 81 | 82 | build_cache->asset_hub_api 83 | 84 | 85 | 86 | 87 | packing->pack_file 88 | 89 | 90 | 91 | 92 | asset_hub_api->engine 93 | 94 | 95 | 96 | 97 | 98 | -------------------------------------------------------------------------------- /docs/graphics/import.dot: -------------------------------------------------------------------------------- 1 | digraph import { 2 | source_file [shape=plaintext,label="Source File"]; 3 | source_file_metadata [shape=plaintext,label=".meta"]; 4 | file_asset_source [label="File Asset Source"]; 5 | importer_input [shape=record,label="{{Importer Options|Importer State}|Processing Pipeline Template|Source File}"]; 6 | intermediate [shape=record,label=" 7 | {Asset Intermediate Format|{ID|Search Tags|Dependencies}|{Processing Pipeline Instance}|Data} 8 | "]; 9 | importer [label="Importer"]; 10 | asset_hub [label="Asset Hub\n(indexing)"]; 11 | 12 | source_file -> file_asset_source -> importer_input -> importer -> intermediate; 13 | 
source_file_metadata -> file_asset_source; 14 | importer -> intermediate; 15 | importer -> intermediate; 16 | intermediate -> asset_hub; 17 | } -------------------------------------------------------------------------------- /docs/graphics/import.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | import 11 | 12 | 13 | source_file 14 | Source File 15 | 16 | 17 | file_asset_source 18 | 19 | File Asset Source 20 | 21 | 22 | source_file->file_asset_source 23 | 24 | 25 | 26 | 27 | source_file_metadata 28 | .meta 29 | 30 | 31 | source_file_metadata->file_asset_source 32 | 33 | 34 | 35 | 36 | importer_input 37 | 38 | Importer Options 39 | 40 | Importer State 41 | 42 | Source File 43 | 44 | 45 | file_asset_source->importer_input 46 | 47 | 48 | 49 | 50 | importer 51 | 52 | Importer 53 | 54 | 55 | importer_input->importer 56 | 57 | 58 | 59 | 60 | intermediate 61 | 62 | Asset Intermediate Format 63 | 64 | ID 65 | 66 | Search Tags 67 | 68 | Dependencies 69 | 70 | Processing Pipeline 71 | 72 | Data 73 | 74 | 75 | asset_hub 76 | 77 | Asset Hub 78 | (indexing) 79 | 80 | 81 | intermediate->asset_hub 82 | 83 | 84 | 85 | 86 | importer->intermediate 87 | 88 | 89 | 90 | 91 | importer->intermediate 92 | 93 | 94 | 95 | 96 | importer->intermediate 97 | 98 | 99 | 100 | 101 | 102 | -------------------------------------------------------------------------------- /docs/graphics/overview.dot: -------------------------------------------------------------------------------- 1 | digraph architecture { 2 | overview [shape=record,label=" Frontend |{ Processing | Validation }| Backend"]; 3 | } -------------------------------------------------------------------------------- /docs/graphics/overview.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | architecture 11 | 12 | 13 | overview 14 | 15 | Frontend 16 | 17 | Processing 18 | 19 | Validation 20 | 21 | Backend 22 | 23 | 24 
| 25 | -------------------------------------------------------------------------------- /docs/graphics/pipeline_example.dot: -------------------------------------------------------------------------------- 1 | digraph import { 2 | rankdir=LR 3 | intermediate [shape=record,label=" 4 | Asset Intermediate Format|Data 5 | "]; 6 | platform_backend1 [label="Asset Platform Backend"] 7 | platform_backend2 [label="Asset Platform Backend"] 8 | graph_node1 [shape=record, label="Graph Node|{Inputs|Outputs}|Generated Asset Slots"]; 9 | graph_node2 [shape=record, label="Graph Node|{Inputs|Outputs}|Generated Asset Slots"]; 10 | graph_node3 [shape=record, label="Graph Node|{Inputs|Outputs}|Generated Asset Slots"]; 11 | intermediate:data -> graph_node1:in 12 | graph_node1:out -> graph_node2:in 13 | graph_node1:out -> graph_node3:in 14 | graph_node2:out -> platform_backend1 15 | graph_node2:out -> platform_backend2 16 | graph_node3:out -> platform_backend2 17 | graph_node3:out -> platform_backend1 18 | } -------------------------------------------------------------------------------- /docs/graphics/pipeline_example.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | import 11 | 12 | 13 | intermediate 14 | 15 | Asset Intermediate Format 16 | 17 | Data 18 | 19 | 20 | graph_node1 21 | 22 | Graph Node 23 | 24 | Inputs 25 | 26 | Outputs 27 | 28 | Generated Asset Slots 29 | 30 | 31 | intermediate:data->graph_node1:in 32 | 33 | 34 | 35 | 36 | platform_backend1 37 | 38 | Asset Platform Backend 39 | 40 | 41 | platform_backend2 42 | 43 | Asset Platform Backend 44 | 45 | 46 | graph_node2 47 | 48 | Graph Node 49 | 50 | Inputs 51 | 52 | Outputs 53 | 54 | Generated Asset Slots 55 | 56 | 57 | graph_node1:out->graph_node2:in 58 | 59 | 60 | 61 | 62 | graph_node3 63 | 64 | Graph Node 65 | 66 | Inputs 67 | 68 | Outputs 69 | 70 | Generated Asset Slots 71 | 72 | 73 | graph_node1:out->graph_node3:in 74 | 75 | 76 | 77 | 78 | 
graph_node2:out->platform_backend1 79 | 80 | 81 | 82 | 83 | graph_node2:out->platform_backend2 84 | 85 | 86 | 87 | 88 | graph_node3:out->platform_backend1 89 | 90 | 91 | 92 | 93 | graph_node3:out->platform_backend2 94 | 95 | 96 | 97 | 98 | 99 | -------------------------------------------------------------------------------- /docs/graphics/processing_example_meshopt.dot: -------------------------------------------------------------------------------- 1 | digraph import { 2 | intermediate [shape=record,label="{Asset Intermediate Format|MeshData}"]; 3 | intermediate2 [shape=record,label="{Asset Intermediate Format|MeshData}"]; 4 | intermediate3 [shape=record,label="{Asset Intermediate Format|MeshData}"]; 5 | mesh_indexing [label="Mesh Indexing\n(creates index buffer)"]; 6 | vertex_cache_opt [label="Vertex Cache\nOptimization"]; 7 | platform_backend [label="Mesh Platform Backend"]; 8 | 9 | intermediate -> mesh_indexing -> intermediate2 -> vertex_cache_opt -> intermediate3 -> platform_backend 10 | 11 | } -------------------------------------------------------------------------------- /docs/graphics/processing_example_meshopt.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | import 11 | 12 | 13 | intermediate 14 | 15 | Asset Intermediate Format 16 | 17 | MeshData 18 | 19 | 20 | mesh_indexing 21 | 22 | Mesh Indexing 23 | (creates index buffer) 24 | 25 | 26 | intermediate->mesh_indexing 27 | 28 | 29 | 30 | 31 | intermediate2 32 | 33 | Asset Intermediate Format 34 | 35 | MeshData 36 | 37 | 38 | vertex_cache_opt 39 | 40 | Vertex Cache 41 | Optimization 42 | 43 | 44 | intermediate2->vertex_cache_opt 45 | 46 | 47 | 48 | 49 | intermediate3 50 | 51 | Asset Intermediate Format 52 | 53 | MeshData 54 | 55 | 56 | platform_backend 57 | 58 | Mesh Platform Backend 59 | 60 | 61 | intermediate3->platform_backend 62 | 63 | 64 | 65 | 66 | mesh_indexing->intermediate2 67 | 68 | 69 | 70 | 71 | vertex_cache_opt->intermediate3 72 
| 73 | 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /docs/graphics/processing_example_spritesheet.dot: -------------------------------------------------------------------------------- 1 | digraph import { 2 | image1 [shape=record,label="{Asset Intermediate Format|Image 1}"]; 3 | image2 [shape=record,label="{Asset Intermediate Format|Image 2}"]; 4 | image3 [shape=record,label="{Asset Intermediate Format|Image 3}"]; 5 | spritesheet_generator [label="Spritesheet Generator"]; 6 | intermediate1 [shape=record,label="{Asset Intermediate Format|Spritesheet}"]; 7 | platform_backend [label="Spritesheet Platform Backend"]; 8 | 9 | image1 -> spritesheet_generator 10 | image2 -> spritesheet_generator 11 | image3 -> spritesheet_generator 12 | spritesheet_generator -> intermediate1 -> platform_backend 13 | 14 | } -------------------------------------------------------------------------------- /docs/graphics/processing_example_spritesheet.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | import 11 | 12 | 13 | image1 14 | 15 | Asset Intermediate Format 16 | 17 | Image 1 18 | 19 | 20 | spritesheet_generator 21 | 22 | Spritesheet Generator 23 | 24 | 25 | image1->spritesheet_generator 26 | 27 | 28 | 29 | 30 | image2 31 | 32 | Asset Intermediate Format 33 | 34 | Image 2 35 | 36 | 37 | image2->spritesheet_generator 38 | 39 | 40 | 41 | 42 | image3 43 | 44 | Asset Intermediate Format 45 | 46 | Image 3 47 | 48 | 49 | image3->spritesheet_generator 50 | 51 | 52 | 53 | 54 | intermediate1 55 | 56 | Asset Intermediate Format 57 | 58 | Spritesheet 59 | 60 | 61 | spritesheet_generator->intermediate1 62 | 63 | 64 | 65 | 66 | platform_backend 67 | 68 | Spritesheet Platform Backend 69 | 70 | 71 | intermediate1->platform_backend 72 | 73 | 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /docs/graphics/ux/pipeline_ux_meshopt.dot: 
-------------------------------------------------------------------------------- 1 | digraph import { 2 | rankdir=LR 3 | pipeline_inputs [shape=record, label="Pipeline Inputs|Mesh"]; 4 | pipeline_output [shape=record, label="Pipeline Output|Inferred"]; 5 | mesh_index [shape=record, label="Index Mesh Vertices|{in: Mesh|out: Mesh}"]; 6 | vertex_cache_opt [shape=record, label="Vertex Cache Optimization|{in: Mesh|out: Mesh}"]; 7 | overdraw_optimization [shape=record, label="Overdraw Optimization|{in: Mesh|out: Mesh}"]; 8 | vertex_quantization [shape=record, label="Vertex Quantization|{in: Mesh|out: Mesh}"]; 9 | pipeline_inputs:out -> mesh_index:in 10 | mesh_index:out -> vertex_cache_opt:in 11 | vertex_cache_opt:out -> overdraw_optimization:in 12 | overdraw_optimization:out -> vertex_quantization:in 13 | vertex_quantization:out -> pipeline_output:in 14 | } -------------------------------------------------------------------------------- /docs/graphics/ux/pipeline_ux_meshopt.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | import 11 | 12 | 13 | pipeline_inputs 14 | 15 | Pipeline Inputs 16 | 17 | Mesh 18 | 19 | 20 | mesh_index 21 | 22 | Index Mesh Vertices 23 | 24 | in: Mesh 25 | 26 | out: Mesh 27 | 28 | 29 | pipeline_inputs:out->mesh_index:in 30 | 31 | 32 | 33 | 34 | pipeline_output 35 | 36 | Pipeline Output 37 | 38 | Inferred 39 | 40 | 41 | vertex_cache_opt 42 | 43 | Vertex Cache Optimization 44 | 45 | in: Mesh 46 | 47 | out: Mesh 48 | 49 | 50 | mesh_index:out->vertex_cache_opt:in 51 | 52 | 53 | 54 | 55 | overdraw_optimization 56 | 57 | Overdraw Optimization 58 | 59 | in: Mesh 60 | 61 | out: Mesh 62 | 63 | 64 | vertex_cache_opt:out->overdraw_optimization:in 65 | 66 | 67 | 68 | 69 | vertex_quantization 70 | 71 | Vertex Quantization 72 | 73 | in: Mesh 74 | 75 | out: Mesh 76 | 77 | 78 | overdraw_optimization:out->vertex_quantization:in 79 | 80 | 81 | 82 | 83 | vertex_quantization:out->pipeline_output:in 84 | 
85 | 86 | 87 | 88 | 89 | -------------------------------------------------------------------------------- /docs/graphics/ux/pipeline_ux_spritesheet_gen_with_list.dot: -------------------------------------------------------------------------------- 1 | digraph import { 2 | rankdir=LR 3 | pipeline_inputs [shape=record, label="Pipeline Inputs|List\"]; 4 | pipeline_output [shape=record, label="Pipeline Output|Inferred"]; 5 | new_asset_uuid [shape=record, label="Generated AssetID"]; 6 | spritesheet_gen [shape=record, label="Spritesheet Generator|{{id: AssetUUID|in: List\}|{spritesheet: Spritesheet|sprites: List\}}"]; 7 | subgraph cluster_process_list { 8 | style=filled; 9 | color=lightgrey; 10 | iter [shape=record, label="iter_in: Inferred|iter_out: Inferred"]; 11 | map_input [shape=record, label="{in: List\|out: List\}"]; 12 | linear_colorspace [shape=record, label="Linear Colorspace|{in: Image|out: Image}"]; 13 | iter:in -> linear_colorspace:in 14 | linear_colorspace:out -> iter:out 15 | label="Process List"; 16 | } 17 | pipeline_inputs:out -> map_input:in 18 | map_input:out -> spritesheet_gen:in 19 | new_asset_uuid -> spritesheet_gen:id_in 20 | spritesheet_gen:out -> pipeline_output:in 21 | spritesheet_gen:sprites_out -> pipeline_output:in 22 | } -------------------------------------------------------------------------------- /docs/graphics/ux/pipeline_ux_spritesheet_gen_with_list.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | import 11 | 12 | cluster_process_list 13 | 14 | Process List 15 | 16 | 17 | pipeline_inputs 18 | 19 | Pipeline Inputs 20 | 21 | List<Image> 22 | 23 | 24 | map_input 25 | 26 | in: List<Inferred> 27 | 28 | out: List<Inferred> 29 | 30 | 31 | pipeline_inputs:out->map_input:in 32 | 33 | 34 | 35 | 36 | pipeline_output 37 | 38 | Pipeline Output 39 | 40 | Inferred 41 | 42 | 43 | new_asset_uuid 44 | 45 | Generated AssetID 46 | 47 | 48 | spritesheet_gen 49 | 50 | Spritesheet Generator 51 
| 52 | id: AssetUUID 53 | 54 | in: List<Image> 55 | 56 | spritesheet: Spritesheet 57 | 58 | sprites: List<Sprite> 59 | 60 | 61 | new_asset_uuid->spritesheet_gen:id_in 62 | 63 | 64 | 65 | 66 | spritesheet_gen:out->pipeline_output:in 67 | 68 | 69 | 70 | 71 | spritesheet_gen:sprites_out->pipeline_output:in 72 | 73 | 74 | 75 | 76 | iter 77 | 78 | iter_in: Inferred 79 | 80 | iter_out: Inferred 81 | 82 | 83 | linear_colorspace 84 | 85 | Linear Colorspace 86 | 87 | in: Image 88 | 89 | out: Image 90 | 91 | 92 | iter:in->linear_colorspace:in 93 | 94 | 95 | 96 | 97 | map_input:out->spritesheet_gen:in 98 | 99 | 100 | 101 | 102 | linear_colorspace:out->iter:out 103 | 104 | 105 | 106 | 107 | 108 | -------------------------------------------------------------------------------- /docs/graphics/ux/pipeline_ux_tex_combine.dot: -------------------------------------------------------------------------------- 1 | digraph import { 2 | rankdir=LR 3 | pipeline_inputs [shape=record, label="Pipeline Inputs|{{r: Image|g: Image|b: Image|a: Image}}"]; 4 | pipeline_output [shape=record, label="Pipeline Output\nAMBIGUOUS ASSETUUID|Inferred"]; 5 | image_combine [shape=record, label="Image From Channels|{{r: Image|g: Image|b: Image|a: Image}|out: Image}"]; 6 | channel_select [shape=record, label="Channel Select\lconstant input: a [rgba/0-9]|{in: Image|out: Image}"] 7 | grayscale [shape=record, label="Grayscale|{in: Image|out: Image}"]; 8 | linear_colorspace [shape=record, label="Linear Colorspace|{in: Image|out: Image}"]; 9 | pipeline_inputs:out_r -> grayscale:in 10 | grayscale:out -> image_combine:in_r 11 | pipeline_inputs:out_g -> channel_select:in 12 | channel_select:out -> image_combine:in_g 13 | pipeline_inputs:out_b -> image_combine:in_b 14 | pipeline_inputs:out_a -> image_combine:in_a 15 | image_combine:out -> linear_colorspace:in 16 | linear_colorspace:out -> pipeline_output:in 17 | } -------------------------------------------------------------------------------- 
/docs/graphics/ux/pipeline_ux_tex_combine.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | import 11 | 12 | 13 | pipeline_inputs 14 | 15 | Pipeline Inputs 16 | 17 | r: Image 18 | 19 | g: Image 20 | 21 | b: Image 22 | 23 | a: Image 24 | 25 | 26 | image_combine 27 | 28 | Image From Channels 29 | 30 | r: Image 31 | 32 | g: Image 33 | 34 | b: Image 35 | 36 | a: Image 37 | 38 | out: Image 39 | 40 | 41 | pipeline_inputs:out_b->image_combine:in_b 42 | 43 | 44 | 45 | 46 | pipeline_inputs:out_a->image_combine:in_a 47 | 48 | 49 | 50 | 51 | channel_select 52 | 53 | Channel Select 54 | constant input: a [rgba/0-9] 55 | 56 | in: Image 57 | 58 | out: Image 59 | 60 | 61 | pipeline_inputs:out_g->channel_select:in 62 | 63 | 64 | 65 | 66 | grayscale 67 | 68 | Grayscale 69 | 70 | in: Image 71 | 72 | out: Image 73 | 74 | 75 | pipeline_inputs:out_r->grayscale:in 76 | 77 | 78 | 79 | 80 | pipeline_output 81 | 82 | Pipeline Output 83 | AMBIGUOUS ASSETUUID 84 | 85 | Inferred 86 | 87 | 88 | linear_colorspace 89 | 90 | Linear Colorspace 91 | 92 | in: Image 93 | 94 | out: Image 95 | 96 | 97 | image_combine:out->linear_colorspace:in 98 | 99 | 100 | 101 | 102 | channel_select:out->image_combine:in_g 103 | 104 | 105 | 106 | 107 | grayscale:out->image_combine:in_r 108 | 109 | 110 | 111 | 112 | linear_colorspace:out->pipeline_output:in 113 | 114 | 115 | 116 | 117 | 118 | -------------------------------------------------------------------------------- /docs/graphics/ux/pipeline_ux_tex_srgb_simple.dot: -------------------------------------------------------------------------------- 1 | digraph import { 2 | rankdir=LR 3 | pipeline_inputs [shape=record, label="Pipeline Inputs|Image"]; 4 | pipeline_output [shape=record, label="Pipeline Output|Inferred"]; 5 | linear_colorspace [shape=record, label="Linear Colorspace|{in: Image|out: Image}"]; 6 | pipeline_inputs:out -> linear_colorspace:in 7 | linear_colorspace:out -> pipeline_output:in 8 
| } -------------------------------------------------------------------------------- /docs/graphics/ux/pipeline_ux_text_srgb_simple.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | import 11 | 12 | 13 | pipeline_inputs 14 | 15 | Pipeline Inputs 16 | 17 | Image 18 | 19 | 20 | linear_colorspace 21 | 22 | Linear Colorspace 23 | 24 | in: Image 25 | 26 | out: Image 27 | 28 | 29 | pipeline_inputs:out->linear_colorspace:in 30 | 31 | 32 | 33 | 34 | pipeline_output 35 | 36 | Pipeline Output 37 | 38 | Inferred 39 | 40 | 41 | linear_colorspace:out->pipeline_output:in 42 | 43 | 44 | 45 | 46 | 47 | -------------------------------------------------------------------------------- /docs/rfc/assetid.md: -------------------------------------------------------------------------------- 1 | # Table of Contents 2 | 3 | - [Motivation](#motivation) 4 | - [Guide Level Explanation](#guide-level-explanation) 5 | - [Reference Level Explanation](#reference-level-explanation) 6 | - [Drawbacks](#drawbacks) 7 | - [Rationale and Alternatives](#rationale-and-alternatives) 8 | - [Prior Art](#prior-art) 9 | - [Unresolved Questions](#unresolved-questions) 10 | 11 | # Basic Info 12 | [basic]: #basic-info 13 | 14 | - Feature Name: asset_id 15 | - Start Date: 2018-11-20 16 | - RFC PR: 17 | - [Tracking Issue](#tracking-issue): 18 | - [Forum Thread](#forum-discussion): 19 | 20 | # Summary 21 | [summary]: #summary 22 | 23 | AssetID is a unified way to reference any loadable asset in Amethyst. It proposes an AssetID enum with variants for v4 UUID (AssetUUID), a file path (PathBuf) or a custom URI format (see [Unresolved Questions](#unresolved-questions)). 24 | 25 | Note that this is the first part in a series of RFCs I intend to write that details the technical aspects of my earlier [Asset Pipeline](https://github.com/amethyst/amethyst/issues/875) proposal. 
26 | 27 | # Motivation 28 | [motivation]: #motivation 29 | 30 | To realize the vision of the Asset Pipeline it will be important to be able to address loadable assets in a more powerful way than with a filesystem path. 31 | Container formats such as GLTF and FBX can benefit from AssetID where an ID can be generated for each loadable asset within the file as opposed to referencing the entire source file. 32 | It is also essential that cross-file references use UUID for Amethyst to handle renaming or moving of files without breaking these references. 33 | 34 | The primary benefit of AssetID is to make it easier to write tools for Amethyst that handle assets. With a unified system for identifying assets, we can build a unified system for loading assets or metadata about assets which is super useful when creating editors and visualisers. 35 | 36 | # Guide-Level Explanation 37 | [guide-level-explanation]: #guide-level-explanation 38 | 39 | AssetID is an identifier for loadable assets. It is a name and enum for any of three variants. 40 | AssetUUID - Globally/universally unique identifier for an asset - v4 UUID. Generated by the Asset Pipeline when an asset is imported. Primary way to reference assets for tools and formats that are not edited by humans. 41 | FilePath - The current way of referencing assets. It will still be supported in scenarios where it is possible. 42 | URI - A way for users to plug in their own custom asset referencing scheme along with their own resolving systems. 43 | 44 | I expect users that write code to primarily reference assets using the FilePath variant. References in data, for example those that are created by a Prefab editor when referencing meshes or textures would use UUID references. This is closer to the expectation of the user since the user did not specify a specific path, but a reference to an asset. 
45 | 46 | If a user created a prefab using a prefab editor and references an asset in a file that was later renamed or moved, it would be unexpected for the reference to no longer resolve since the user did not specify a path in the prefab editor. In the case of loading an asset by file path in code, it would be the expected behaviour for the load to fail if a file is removed however. 47 | 48 | URI is a general way of referencing an asset that does not exist in the filesystem and was not imported by the asset pipeline. An example of this could be an asset stored in a database somewhere that is specific to the game it is resolved in. URIs are included in AssetID as a point of further extension for game developers and will not be used by any of the initial Asset Pipeline systems. 49 | The intention is to provide traits that can be implemented to make it easier to integrate custom IDs into the asset loading process. 50 | 51 | # Reference-Level Explanation 52 | [reference-level-explanation]: #reference-level-explanation 53 | 54 | I propose to define the following in amethyst::assets. 55 | ``` 56 | pub type AssetUUID = [u8; 16]; 57 | #[derive(Clone, Serialize, Deserialize, Hash)] 58 | pub enum AssetID { 59 | UUID(AssetUUID), 60 | FilePath(PathBuf), 61 | URI(URI), 62 | } 63 | ``` 64 | 65 | When an asset is imported from a file, an AssetUUID is generated for it and maintained by the Asset Hub daemon in metadata (see other, future RFC). 66 | 67 | Once the Asset Pipeline proposal is fully realized, the AssetID will be used in various parts of the engine/asset pipeline. I don't believe there is value in implementing it before other parts of the Asset Pipeline are ready. 68 | 69 | ### Import 70 | Each source file can be imported independently in parallel. When importing a single source file, possibly multiple resulting assets will be assigned an AssetUUID each and they may have AssetID dependencies of three kinds: Build, load and instantiate.
71 | In some cases these dependencies will be defined with the FilePath variant. After the import completes but before persisting metadata, the Asset Pipeline will attempt to resolve the FilePath dependencies of these assets to their AssetUUID equivalents. If the path cannot be resolved, the import will result in an error. This will ensure that the Asset Pipeline and all tools only have to handle stable AssetUUID references that map to specific assets regardless of the filesystem state. 72 | 73 | Attentive readers may have realized that a problem occurs in the case of a FilePath reference to a source file with multiple imported assets in them, such as a GLTF file. I propose that the Importer can specify a "main" asset that is to be used for the FilePath->Asset resolving process specified in [Load](#load) 74 | 75 | ### Build 76 | When an asset is built for a target platform, the build dependencies of the asset will be included for the Builder to consume. 77 | 78 | An example of this is shader #include statements. In the import step, the Importer can parse the shader for all include statements and convert these to AssetIDs. The builder will then receive both the source shader and its entire dependency set, including dependencies of dependencies recursively. 79 | This allows the Builder to be a pure function which makes it a lot easier to implement things like build caching and distributed building. 80 | 81 | ### Load 82 | [load]: #load 83 | All loading will reference assets using AssetID. Different Resolver implementations will handle resolving an AssetID to a loadable asset blob depending on the runtime environment and the AssetID variant. The Resolver replaces the existing Source trait as a more general solution. 84 | 85 | For example, in a development environment the Resolver will probably connect to AssetHub to resolve the AssetID to a build artifact hash based on the target platform, then load the build artifact and return it to the caller. 
86 | 87 | In a distributable build with optimized packing of assets, the Resolver may have an in-memory map of AssetUUID->FileLocation for knowing where an asset can be loaded from. It then performs the file read and returns the build artifact. For FilePath variants, the Resolver may have an in-memory map of FilePath->AssetUUID it uses before running the AssetUUID resolving process. 88 | In the case of modding or asset overrides, a completely custom Resolver may be written that maintains a map of AssetUUID->FilePath to see if the user has provided a file override. If not, the resolver falls back to the resolver for packed assets. 89 | 90 | # Drawbacks 91 | [drawbacks]: #drawbacks 92 | 93 | The drawbacks compared to no change at all are possibly increased complexity? 94 | 95 | # Rationale, Alternatives and Prior Art 96 | [rationale-and-alternatives]: #rationale-and-alternatives 97 | 98 | This design for an asset identifier is primarily inspired by the Unity asset management system and most of the advantages can be seen by looking at how their builds work. The most important improvement upon their design being the following: 99 | - Unity defines a UUID to reference a source file, then uses a "file-internal ID" to reference specific assets. This proposal improves upon that design with a general design that enables referencing specific assets within a source file with 16 bytes instead of 16 + 4 bytes. 100 | [Reference here](https://docs.unity3d.com/ScriptReference/Build.Content.ObjectIdentifier.html) 101 | 102 | The design is in my opinion superior to Godot, Unreal and Crytek asset management systems as their asset references use paths, leading to issues with renaming or moving files. Unreal's solution to moving/renaming only works if you move the file in the editor: put a "Redirector" at the path of a moved or renamed file.
[Source here](https://docs.unrealengine.com/en-us/Engine/Basics/Redirectors) 103 | 104 | # Unresolved Questions 105 | [unresolved-questions]: #unresolved-questions 106 | 107 | An unresolved question I have is whether URI should be included at all (not needed for most common use case, but has been requested previously) and if it is to be included, which URI library should be used. -------------------------------------------------------------------------------- /docs/rfc/overview.md: -------------------------------------------------------------------------------- 1 | I've been working on an implementation of the ideas in the Asset Pipeline RFC lately and I now feel ready to present some technical details and concrete proposals. 2 | 3 | # Table of Contents 4 | 5 | - [Tracking Issues](#tracking-issue) 6 | - [Amethyst Community Forum Discussion](#forum-discussion) 7 | - [Motivation] 8 | - [Guide Level Explanation](#guide-level-explanation) 9 | - [Reference Level Explanation](#reference-level-explanation) 10 | - [Drawbacks] 11 | - [Rationale and Alternatives](#rationale-and-alternatives) 12 | - [Prior Art](#prior-art) 13 | - [Unresolved Questions](#unresolved-questions) 14 | 15 | # Basic Info 16 | [basic]: #basic-info 17 | 18 | - Feature Name: asset_pipeline 19 | - Start Date: (fill me in with today's date, YYYY-MM-DD) 20 | - RFC PR: (leave this empty until a PR is opened) 21 | - [Tracking Issue](#tracking-issue): (leave this empty) 22 | - [Forum Thread](#forum-discussion): (if there is one) 23 | 24 | # Summary 25 | [summary]: #summary 26 | 27 | With the asset pipeline we introduce an "offline" workflow for importing source files and building assets to be loaded in the Amethyst engine. It aims to enable a number of features. 
28 | - Hot reloading of any asset 29 | - Deterministic builds and build artifact caching 30 | - Asset dependency graphs 31 | - Offline optimization and preprocessing of assets with settings specific to each target platform 32 | - Scalable importing and building that can take advantage of available computing resources 33 | - Searching and querying based on tags extracted by asset importers 34 | - Moving and renaming assets without breaking references 35 | 36 | The required changes to Amethyst as an engine are comprehensive but finite. To make things easier I will try to split the RFC into a number of smaller RFCs that describe specific changes. These changes may have dependencies on each other and some may not make sense to implement independently, but are broken up to make digesting them easier. 37 | 38 | RFCs 39 | - AssetID - unified system of referencing any individual asset that is loaded from a source file 40 | - Format rework - new traits to replace assets::Format: Importer and Builder that generate metadata and enable deterministic builds and caching of build artifacts 41 | - TypeUUID and type registry - dynamic serialization and deserialization using type_uuid and a type registry 42 | - Reflection - runtime fixup of AssetID -> Handle at deserialize time 43 | - AssetHub - daemon/service that runs on development machines and watches project directories for changes, performs imports and maintains metadata 44 | - RPC - protocol for communication between tooling (AssetHub primarily) and engine 45 | - Asset packing - packing assets into a format that is suitable for distribution 46 | - Asset loading - new AssetLoader implementation that works with AssetIDs and Handles 47 | - Prefab rework - new implementation of Prefabs that is defined in terms of components and asset references instead of embedding assets in a prefab. 
48 | - GLTF Importer - new implementation of gltf importer that extracts individually addressable assets (including prefab) 49 | - RON prefab - new implementation of ron prefab importer that uses AssetIDs 50 | - Configs - use AssetIDs and new loader to load configs in addition to content assets 51 | 52 | 53 | ## Amethyst Community Forum Discussion 54 | You can access our forums at https://community.amethyst-engine.org 55 | 56 | [forum-discussion]: #forum-discussion 57 |
58 | Information about pre-RFC discussion on our community forum 59 | There is a category on our forums for what can be considered pre-RFC discussion. It is a good place to get some quick feedback from the community without having to go through the entire process. 60 | 61 | This is not required, but if one exists and contains useful information, you may place a link to it here. 62 |
63 | 64 | # Motivation 65 | [motivation]: #motivation 66 | Why are we doing this? What use cases does it support? What is the expected outcome? 67 | 68 | # Guide-Level Explanation 69 | [guide-level-explanation]: #guide-level-explanation 70 |
71 | 72 | Non-technical overview and reasoning for it. 73 | Explain the proposal as if it was already included in the language and you were teaching it to another Amethyst programmer. That generally means: 74 | 75 | - Introducing new named concepts. 76 | - Explaining the feature largely in terms of examples. 77 | - Explaining how Amethyst developers should *think* about the feature, and how it should impact the way they use Amethyst. It should explain the impact as concretely as possible. 78 | - If applicable, provide sample error messages, deprecation warnings, or migration guidance. 79 | - If applicable, describe the differences between teaching this to existing Amethyst programmers and new Amethyst programmers. 80 | 81 | For implementation-oriented RFCs (e.g. for changes to the engine), this section should focus on how engine contributors should think about the change, and give examples of its concrete impact. For policy RFCs, this section should provide an example-driven introduction to the policy, and explain its impact in concrete terms. 82 |
83 | 84 | # Reference-Level Explanation 85 | [reference-level-explanation]: #reference-level-explanation 86 |
87 | The technical details and design of the RFC. 88 | This is the technical portion of the RFC. Explain the design in sufficient detail that: 89 | 90 | - Its interaction with other features is clear. 91 | - It is reasonably clear how the feature would be implemented. 92 | - Corner cases are dissected by example. 93 | 94 | The section should return to the examples given in the previous section, and explain more fully how the detailed proposal makes those examples work. 95 |
96 | 97 | # Drawbacks 98 | [drawbacks]: #drawbacks 99 | 100 | Why should we *not* do this? 101 | 102 | # Rationale and Alternatives 103 | [rationale-and-alternatives]: #rationale-and-alternatives 104 | 105 | - Why is this design the best in the space of possible designs? 106 | - What other designs have been considered and what is the rationale for not choosing them? 107 | - What is the impact of not doing this? 108 | 109 | # Prior Art 110 | [prior-art]: #prior-art 111 |
112 | Discuss previous attempts, both good and bad, and how they relate to this proposal. 113 | A few examples of what this can include are: 114 | 115 | - For engine, network, web, and rendering proposals: Does this feature exist in other engines and what experience has their community had? 116 | - For community proposals: Is this done by some other community and what were their experiences with it? 117 | - For other teams: What lessons can we learn from what other communities have done here? 118 | - Papers: Are there any published papers or great posts that discuss this? If you have some relevant papers to refer to, this can serve as a more detailed theoretical background. 119 | 120 | This section is intended to encourage you as an author to think about the lessons from other engines, provide readers of your RFC with a fuller picture. 121 | If there is no prior art, that is fine - your ideas are interesting to us whether they are brand new or if it is an adaptation from other engines. 122 |
123 | 124 | # Unresolved Questions 125 | [unresolved-questions]: #unresolved-questions 126 |
127 | Additional questions to consider 128 | 129 | - What parts of the design do you expect to resolve through the RFC process before this gets merged? 130 | - What parts of the design do you expect to resolve through the implementation of this feature before stabilization? 131 | - What related issues do you consider out of scope for this RFC that could be addressed in the future independently of the solution that comes out of this RFC? 132 |
133 | 134 | Copyright 2018 Amethyst Developers -------------------------------------------------------------------------------- /docs/source_overview.md: -------------------------------------------------------------------------------- 1 | ## Modules 2 | 3 | ### DirWatcher 4 | Provides filesystem events with cross-platform support using the glorious crate [notify](https://docs.rs/notify/4.0.6/notify/). DirWatcher follows symlinks and supports watching multiple directories, providing events through a crossbeam_channel. 5 | 6 | ### capnp_db 7 | Layers capnproto serialization on top of LMDB for zero-copy reads and a nicer API for using capnproto messages as keys and/or values. 8 | 9 | ### FileTracker 10 | Receives filesystem events from DirWatcher and indexes the last seen filesystem state in a DB to provide a consistent view. Can provide real change events based on filesystem modification time and length even if a change was performed when the daemon was not active. It also maintains a set of "dirty files" to be consumed by FileAssetSource to ensure that changes always can be processed, even in the case of a crash. 11 | 12 | ### AssetHub 13 | Indexes asset metadata by AssetID. 14 | 15 | ### FileAssetSource 16 | Receives change events from FileTracker and performs processing on changed pairs. 17 | 1. Reads the dirty_files set from FileTracker 18 | 2. Fixes up Path->\[AssetID\] DB index based on renames 19 | 3. Creates a source file:meta pairing for each dirty file 20 | 4. Hashes the source and meta file pairs 21 | 5. "Imports" the pair: deserializes the meta file, then runs stuff in asset_import based on file extension. This produces a list of assets from the source file. 22 | 6. Indexes Path->\[AssetID\] and reverse 23 | 7. Tells AssetHub about any updated or removed assets 24 | 25 | ### AssetHubService 26 | Provides interactive snapshots and transactions for metadata in the various modules over `capnproto-rpc`. 
-------------------------------------------------------------------------------- /examples/daemon_with_loader/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "daemon_with_loader" 3 | version = "0.0.1" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | readme = "README.md" 7 | license = "MIT/Apache-2.0" 8 | publish = false 9 | 10 | [dependencies] 11 | distill = { version = "=0.0.3", path = "../../", features = ["type_uuid", "distill-daemon", "distill-importer"] } 12 | image2 = { version = "0.11", features = ["ser"] } 13 | log = { version = "0.4", features = ["serde"] } 14 | serde = "1" 15 | uuid = "0.8.2" 16 | bincode = "1.3.1" 17 | futures-util = { version = "0.3", default-features = false } 18 | futures-io = { version = "0.3", default-features = false } 19 | futures-core = { version = "0.3", default-features = false, features = ["alloc"] } 20 | -------------------------------------------------------------------------------- /examples/daemon_with_loader/assets/amethyst.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amethyst/distill/852e48272ab3dad76c7490fce8df52461f94ebc0/examples/daemon_with_loader/assets/amethyst.png -------------------------------------------------------------------------------- /examples/daemon_with_loader/assets/amethyst.png.meta: -------------------------------------------------------------------------------- 1 | ( 2 | version: 2, 3 | importer_options: (), 4 | importer_state: (Some("6c5ae1ad-ae30-471b-985b-7d017265f19f")), 5 | ) -------------------------------------------------------------------------------- /examples/daemon_with_loader/src/game.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, error::Error}; 2 | 3 | use distill::{ 4 | core::type_uuid::TypeUuid, 5 | loader::{ 6 | loader::Loader, 7 | storage::{ 8 | 
AssetLoadOp, AssetStorage, DefaultIndirectionResolver, IndirectionTable, LoadHandle, 9 | LoadStatus, LoaderInfoProvider, 10 | }, 11 | AssetTypeId, RpcIO, 12 | }, 13 | }; 14 | 15 | use crate::image::Image; 16 | 17 | #[allow(dead_code)] 18 | struct AssetState
{ 19 | version: u32, 20 | asset: A, 21 | } 22 | pub struct Storage { 23 | assets: HashMap>, 24 | uncommitted: HashMap>, 25 | indirection_table: IndirectionTable, 26 | } 27 | impl Storage { 28 | fn new(indirection_table: IndirectionTable) -> Self { 29 | Self { 30 | assets: HashMap::new(), 31 | uncommitted: HashMap::new(), 32 | indirection_table, 33 | } 34 | } 35 | 36 | pub fn get_asset(&self, handle: LoadHandle) -> Option<&A> { 37 | let handle = if handle.is_indirect() { 38 | self.indirection_table.resolve(handle)? 39 | } else { 40 | handle 41 | }; 42 | let asset = self.assets.get(&handle); 43 | asset.map(|state| &state.asset) 44 | } 45 | } 46 | // Implementation of AssetStorage for the typed storage 47 | impl serde::Deserialize<'a>> AssetStorage for Storage { 48 | fn update_asset( 49 | &mut self, 50 | _loader_info: &dyn LoaderInfoProvider, 51 | _asset_type_id: &AssetTypeId, 52 | data: Vec, 53 | load_handle: LoadHandle, 54 | load_op: AssetLoadOp, 55 | version: u32, 56 | ) -> Result<(), Box> { 57 | self.uncommitted.insert( 58 | load_handle, 59 | AssetState { 60 | asset: bincode::deserialize::(&data).expect("failed to deserialize asset"), 61 | version, 62 | }, 63 | ); 64 | log::info!("{} bytes loaded for {:?}", data.len(), load_handle); 65 | // The loading process could be async, in which case you can delay 66 | // calling `load_op.complete` as it should only be done when the asset is usable. 67 | load_op.complete(); 68 | Ok(()) 69 | } 70 | 71 | fn commit_asset_version( 72 | &mut self, 73 | _asset_type: &AssetTypeId, 74 | load_handle: LoadHandle, 75 | _version: u32, 76 | ) { 77 | // The commit step is done after an asset load has completed. 78 | // It exists to avoid frames where an asset that was loaded is unloaded, which 79 | // could happen when hot reloading. To support this case, you must support having multiple 80 | // versions of an asset loaded at the same time. 
81 | self.assets.insert( 82 | load_handle, 83 | self.uncommitted 84 | .remove(&load_handle) 85 | .expect("asset not present when committing"), 86 | ); 87 | log::info!("Commit {:?}", load_handle); 88 | } 89 | 90 | fn free(&mut self, _asset_type_id: &AssetTypeId, load_handle: LoadHandle, version: u32) { 91 | if let Some(asset) = self.uncommitted.get(&load_handle) { 92 | if asset.version == version { 93 | self.uncommitted.remove(&load_handle); 94 | } 95 | } 96 | if let Some(asset) = self.assets.get(&load_handle) { 97 | if asset.version == version { 98 | self.assets.remove(&load_handle); 99 | } 100 | } 101 | log::info!("Free {:?}", load_handle); 102 | } 103 | } 104 | struct Game { 105 | storage: HashMap>, 106 | } 107 | 108 | // Untyped implementation of AssetStorage that finds the asset_type's storage and forwards the call 109 | impl AssetStorage for Game { 110 | fn update_asset( 111 | &mut self, 112 | loader_info: &dyn LoaderInfoProvider, 113 | asset_type_id: &AssetTypeId, 114 | data: Vec, 115 | load_handle: LoadHandle, 116 | load_op: AssetLoadOp, 117 | version: u32, 118 | ) -> Result<(), Box> { 119 | self.storage 120 | .get_mut(asset_type_id) 121 | .expect("unknown asset type") 122 | .update_asset( 123 | loader_info, 124 | asset_type_id, 125 | data, 126 | load_handle, 127 | load_op, 128 | version, 129 | ) 130 | } 131 | 132 | fn commit_asset_version( 133 | &mut self, 134 | asset_type: &AssetTypeId, 135 | load_handle: LoadHandle, 136 | version: u32, 137 | ) { 138 | self.storage 139 | .get_mut(asset_type) 140 | .expect("unknown asset type") 141 | .commit_asset_version(asset_type, load_handle, version) 142 | } 143 | 144 | fn free(&mut self, asset_type_id: &AssetTypeId, load_handle: LoadHandle, version: u32) { 145 | self.storage 146 | .get_mut(asset_type_id) 147 | .expect("unknown asset type") 148 | .free(asset_type_id, load_handle, version) 149 | } 150 | } 151 | 152 | pub fn run() { 153 | let mut game = Game { 154 | storage: HashMap::new(), 155 | }; 156 | let mut loader 
= Loader::new(Box::new(RpcIO::default())); 157 | // Create storage for Image type 158 | game.storage.insert( 159 | AssetTypeId(Image::UUID), 160 | Box::new(Storage::::new(loader.indirection_table())), 161 | ); 162 | 163 | let handle = loader.add_ref("6c5ae1ad-ae30-471b-985b-7d017265f19f"); 164 | loop { 165 | loader 166 | .process(&mut game, &DefaultIndirectionResolver) 167 | .expect("failed to process loader"); 168 | if let LoadStatus::Loaded = loader.get_load_status(handle) { 169 | break; 170 | } 171 | } 172 | // The basic API uses explicit reference counting. 173 | // Integrate with distill_loader::handle for automatic reference counting! 174 | loader.remove_ref(handle); 175 | loop { 176 | loader 177 | .process(&mut game, &DefaultIndirectionResolver) 178 | .expect("failed to process loader"); 179 | if let LoadStatus::NotRequested = loader.get_load_status(handle) { 180 | break; 181 | } 182 | } 183 | } 184 | -------------------------------------------------------------------------------- /examples/daemon_with_loader/src/image.rs: -------------------------------------------------------------------------------- 1 | use distill::{ 2 | core::{type_uuid, type_uuid::TypeUuid, AssetUuid}, 3 | importer::{AsyncImporter, Error, ImportOp, ImportedAsset, ImporterValue, Result}, 4 | }; 5 | use futures_core::future::BoxFuture; 6 | use futures_io::AsyncRead; 7 | use futures_util::AsyncReadExt; 8 | use image2::{color, ImageBuf}; 9 | use serde::{Deserialize, Serialize}; 10 | 11 | #[derive(TypeUuid, Serialize, Deserialize)] 12 | #[uuid = "d4079e74-3ec9-4ebc-9b77-a87cafdfdada"] 13 | pub enum Image { 14 | Rgb8(ImageBuf), 15 | // ... 
16 | } 17 | 18 | #[derive(TypeUuid, Serialize, Deserialize, Default)] 19 | #[uuid = "3c8367c8-45fb-40bb-a229-00e5e9c3fc70"] 20 | pub struct SimpleState(Option); 21 | #[derive(TypeUuid)] 22 | #[uuid = "720d636b-b79c-42d4-8f46-a2d8e1ada46e"] 23 | pub struct ImageImporter; 24 | impl AsyncImporter for ImageImporter { 25 | type Options = (); 26 | type State = SimpleState; 27 | 28 | fn version_static() -> u32 29 | where 30 | Self: Sized, 31 | { 32 | 1 33 | } 34 | 35 | fn version(&self) -> u32 { 36 | Self::version_static() 37 | } 38 | 39 | /// Reads the given bytes and produces assets. 40 | fn import<'a>( 41 | &'a self, 42 | _op: &'a mut ImportOp, 43 | source: &'a mut (dyn AsyncRead + Unpin + Send + Sync), 44 | _options: &Self::Options, 45 | state: &'a mut Self::State, 46 | ) -> BoxFuture<'a, Result> { 47 | Box::pin(async move { 48 | let id = state 49 | .0 50 | .unwrap_or_else(|| AssetUuid(*uuid::Uuid::new_v4().as_bytes())); 51 | *state = SimpleState(Some(id)); 52 | let mut bytes = Vec::new(); 53 | source.read_to_end(&mut bytes).await?; 54 | let asset = 55 | Image::Rgb8(image2::io::decode(&bytes).map_err(|e| Error::Boxed(Box::new(e)))?); 56 | Ok(ImporterValue { 57 | assets: vec![ImportedAsset { 58 | id, 59 | search_tags: vec![], 60 | build_deps: vec![], 61 | load_deps: vec![], 62 | build_pipeline: None, 63 | asset_data: Box::new(asset), 64 | }], 65 | }) 66 | }) 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /examples/daemon_with_loader/src/main.rs: -------------------------------------------------------------------------------- 1 | mod game; 2 | mod image; 3 | use std::path::PathBuf; 4 | 5 | use distill::daemon::{init_logging, AssetDaemon}; 6 | pub use game::Storage; 7 | 8 | fn main() { 9 | init_logging().expect("failed to init logging"); 10 | std::thread::spawn(move || { 11 | AssetDaemon::default() 12 | .with_importer(&["png"], crate::image::ImageImporter) 13 | .with_db_path(".assets_db") 14 | 
.with_address("127.0.0.1:9999".parse().unwrap()) 15 | .with_asset_dirs(vec![PathBuf::from("assets")]) 16 | .run(); 17 | }); 18 | game::run(); 19 | 20 | println!("Successfully loaded and unloaded assets."); 21 | println!( 22 | r#"Check the asset metadata using the CLI! 23 | Open a new terminal without exiting this program, and run: 24 | - `cd cli` # from the project root 25 | - `cargo run` 26 | - Try `show_all` to get UUIDs of all indexed assets, then `get` a returned uuid 27 | - `help` to list all available commands. 28 | "# 29 | ); 30 | use std::io::{Read, Write}; 31 | let mut stdin = std::io::stdin(); 32 | let mut stdout = std::io::stdout(); 33 | 34 | write!(stdout, "Press any key to exit...").unwrap(); 35 | stdout.flush().unwrap(); 36 | 37 | let _ = stdin.read(&mut [0u8]).unwrap(); 38 | } 39 | -------------------------------------------------------------------------------- /examples/handle_integration/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "handle_integration" 3 | version = "0.0.1" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | readme = "README.md" 7 | license = "MIT/Apache-2.0" 8 | publish = false 9 | 10 | [dependencies] 11 | distill = { version = "=0.0.3", path = "../..", features = ["serde_importers", "pretty_log"] } 12 | futures-executor = { version = "0.3", default-features = false } 13 | 14 | image2 = { version = "0.11", features = ["ser"] } 15 | log = { version = "0.4", features = ["serde"] } 16 | serde = "1" 17 | uuid = "0.8.2" 18 | bincode = "1.3.1" 19 | -------------------------------------------------------------------------------- /examples/handle_integration/assets/amethyst.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amethyst/distill/852e48272ab3dad76c7490fce8df52461f94ebc0/examples/handle_integration/assets/amethyst.png 
-------------------------------------------------------------------------------- /examples/handle_integration/assets/amethyst.png.meta: -------------------------------------------------------------------------------- 1 | ( 2 | version: 2, 3 | importer_options: (), 4 | importer_state: (Some("36605880-e92e-4d87-818f-acc2ab236e9c")), 5 | ) -------------------------------------------------------------------------------- /examples/handle_integration/assets/custom_asset.ron: -------------------------------------------------------------------------------- 1 | { 2 | "fab4249b-f95d-411d-a017-7549df090a4f": 3 | ( 4 | cool_string: "thanks", 5 | handle_made_from_path: "amethyst.png", 6 | handle_made_from_uuid: "36605880-e92e-4d87-818f-acc2ab236e9c" 7 | ) 8 | } -------------------------------------------------------------------------------- /examples/handle_integration/assets/custom_asset.ron.meta: -------------------------------------------------------------------------------- 1 | ( 2 | version: 2, 3 | importer_options: (), 4 | importer_state: ( 5 | id: Some("dfbee823-6086-41fb-b47d-cb5a27024861"), 6 | ), 7 | ) -------------------------------------------------------------------------------- /examples/handle_integration/src/custom_asset.rs: -------------------------------------------------------------------------------- 1 | use distill::{ 2 | core::{type_uuid, type_uuid::TypeUuid}, 3 | importer::{self as distill_importer, typetag, SerdeImportable}, 4 | loader::handle::Handle, 5 | }; 6 | use serde::{Deserialize, Serialize}; 7 | 8 | #[derive(Serialize, Deserialize, TypeUuid, SerdeImportable, Debug)] 9 | #[uuid = "fab4249b-f95d-411d-a017-7549df090a4f"] 10 | pub struct BigPerf { 11 | pub cool_string: String, 12 | pub handle_made_from_path: Handle, 13 | pub handle_made_from_uuid: Handle, 14 | } 15 | -------------------------------------------------------------------------------- /examples/handle_integration/src/game.rs: 
-------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use distill::loader::{ 4 | crossbeam_channel::{unbounded, Receiver}, 5 | handle::{self, AssetHandle, Handle, RefOp, WeakHandle}, 6 | storage::{DefaultIndirectionResolver, IndirectIdentifier, LoadStatus}, 7 | Loader, RpcIO, 8 | }; 9 | 10 | use crate::{custom_asset::BigPerf, image::Image, storage::GenericAssetStorage}; 11 | 12 | struct Game { 13 | storage: GenericAssetStorage, 14 | } 15 | 16 | fn process(loader: &mut Loader, game: &mut Game, chan: &Receiver) { 17 | handle::process_ref_ops(loader, chan); 18 | loader 19 | .process(&mut game.storage, &DefaultIndirectionResolver) 20 | .expect("failed to process loader"); 21 | } 22 | 23 | pub fn run() { 24 | let (tx, rx) = unbounded(); 25 | let tx = Arc::new(tx); 26 | 27 | let mut loader = Loader::new(Box::new(RpcIO::default())); 28 | // let file = std::fs::File::open(std::path::PathBuf::from("my.pack")).unwrap(); 29 | // let file_reader = distill::loader::packfile_io::PackfileReader::new(file).unwrap(); 30 | // let mut loader = Loader::new(Box::new(file_reader)); 31 | let mut game = Game { 32 | storage: GenericAssetStorage::new(tx.clone(), loader.indirection_table()), 33 | }; 34 | game.storage.add_storage::(); 35 | game.storage.add_storage::(); 36 | let weak_handle = { 37 | // add_ref_indirect begins loading of the asset 38 | let handle = loader.add_ref_indirect(IndirectIdentifier::Path("custom_asset.ron".into())); 39 | // From the returned LoadHandle, create a typed, internally refcounted Handle. 40 | // This requires a channel to send increase/decrease over to be able to implement 41 | // Clone and Drop. In a real implementation, you would probably create nicer wrappers for this. 
42 | let handle = Handle::::new((*tx).clone(), handle); 43 | loop { 44 | process(&mut loader, &mut game, &rx); 45 | if let LoadStatus::Loaded = handle.load_status(&loader) { 46 | break; 47 | } 48 | } 49 | // From the Storage, use the Handle to get a reference to the loaded asset. 50 | let custom_asset: &BigPerf = handle.asset(&game.storage).expect("failed to get asset"); 51 | // The custom asset has an automatically constructed Handle reference to an Image. 52 | log::info!( 53 | "Image dependency has handle {:?} from path, and {:?} from UUID", 54 | custom_asset.handle_made_from_path.load_handle(), 55 | custom_asset.handle_made_from_uuid.load_handle() 56 | ); 57 | // Handle is automatically refcounted, so it will be dropped at the end of this scope, 58 | // causing the asset and its dependencies to be unloaded. 59 | // We return a WeakHandle of the image dependency to be able to track the unload of the dependency, 60 | // which happens after the dependee. 61 | WeakHandle::new(custom_asset.handle_made_from_path.load_handle()) 62 | }; 63 | loop { 64 | process(&mut loader, &mut game, &rx); 65 | if let LoadStatus::NotRequested = weak_handle.load_status(&loader) { 66 | break; 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /examples/handle_integration/src/image.rs: -------------------------------------------------------------------------------- 1 | use distill::{ 2 | core::{type_uuid, type_uuid::TypeUuid, AssetUuid}, 3 | importer::{Error, ImportOp, ImportedAsset, Importer, ImporterValue, Result}, 4 | }; 5 | use image2::{color, ImageBuf}; 6 | use serde::{Deserialize, Serialize}; 7 | 8 | #[derive(TypeUuid, Serialize, Deserialize, Debug)] 9 | #[uuid = "d4079e74-3ec9-4ebc-9b77-a87cafdfdada"] 10 | pub enum Image { 11 | Rgb8(ImageBuf), 12 | // ... 
13 | } 14 | 15 | #[derive(TypeUuid, Serialize, Deserialize, Default)] 16 | #[uuid = "3c8367c8-45fb-40bb-a229-00e5e9c3fc70"] 17 | pub struct SimpleState(Option); 18 | 19 | #[derive(TypeUuid)] 20 | #[uuid = "720d636b-b79c-42d4-8f46-a2d8e1ada46e"] 21 | pub struct ImageImporter; 22 | impl Importer for ImageImporter { 23 | type Options = (); 24 | type State = SimpleState; 25 | 26 | fn version_static() -> u32 27 | where 28 | Self: Sized, 29 | { 30 | 1 31 | } 32 | 33 | fn version(&self) -> u32 { 34 | Self::version_static() 35 | } 36 | 37 | /// Reads the given bytes and produces assets. 38 | fn import( 39 | &self, 40 | _op: &mut ImportOp, 41 | source: &mut dyn std::io::Read, 42 | _options: &Self::Options, 43 | state: &mut Self::State, 44 | ) -> Result { 45 | let id = state 46 | .0 47 | .unwrap_or_else(|| AssetUuid(*uuid::Uuid::new_v4().as_bytes())); 48 | *state = SimpleState(Some(id)); 49 | let mut bytes = Vec::new(); 50 | source.read_to_end(&mut bytes)?; 51 | let asset = Image::Rgb8(image2::io::decode(&bytes).map_err(|e| Error::Boxed(Box::new(e)))?); 52 | Ok(ImporterValue { 53 | assets: vec![ImportedAsset { 54 | id, 55 | search_tags: vec![], 56 | build_deps: vec![], 57 | load_deps: vec![], 58 | build_pipeline: None, 59 | asset_data: Box::new(asset), 60 | }], 61 | }) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /examples/handle_integration/src/main.rs: -------------------------------------------------------------------------------- 1 | mod custom_asset; 2 | mod game; 3 | mod image; 4 | mod storage; 5 | 6 | use std::path::PathBuf; 7 | 8 | use distill::daemon::{init_logging, AssetDaemon}; 9 | 10 | fn main() { 11 | init_logging().expect("failed to init logging"); 12 | std::thread::spawn(move || { 13 | AssetDaemon::default() 14 | .with_importer(&["png"], crate::image::ImageImporter) 15 | .with_db_path(".assets_db") 16 | .with_address("127.0.0.1:9999".parse().unwrap()) 17 | .with_asset_dirs(vec![PathBuf::from("assets")]) 18 
| .run(); 19 | }); 20 | game::run(); 21 | 22 | println!("Successfully loaded and unloaded assets."); 23 | println!( 24 | r#"Check the asset metadata using the CLI! 25 | Open a new terminal without exiting this program, and run: 26 | - `cd cli` # from the project root 27 | - `cargo run` 28 | - Try `show_all` to get UUIDs of all indexed assets, then `get` a returned uuid 29 | - `help` to list all available commands. 30 | "# 31 | ); 32 | use std::io::{Read, Write}; 33 | let mut stdin = std::io::stdin(); 34 | let mut stdout = std::io::stdout(); 35 | 36 | write!(stdout, "Press any key to exit...").unwrap(); 37 | stdout.flush().unwrap(); 38 | 39 | let _ = stdin.read(&mut [0u8]).unwrap(); 40 | } 41 | -------------------------------------------------------------------------------- /examples/handle_integration/src/storage.rs: -------------------------------------------------------------------------------- 1 | use std::{any::Any, cell::RefCell, collections::HashMap, error::Error, sync::Arc}; 2 | 3 | use distill::{ 4 | core::type_uuid::TypeUuid, 5 | loader::{ 6 | crossbeam_channel::Sender, 7 | handle::{AssetHandle, RefOp, TypedAssetStorage}, 8 | storage::{AssetLoadOp, AssetStorage, IndirectionTable, LoadHandle, LoaderInfoProvider}, 9 | AssetTypeId, 10 | }, 11 | }; 12 | 13 | pub struct GenericAssetStorage { 14 | storage: RefCell>>, 15 | refop_sender: Arc>, 16 | indirection_table: IndirectionTable, 17 | } 18 | 19 | impl GenericAssetStorage { 20 | pub fn new(refop_sender: Arc>, indirection_table: IndirectionTable) -> Self { 21 | Self { 22 | storage: RefCell::new(HashMap::new()), 23 | refop_sender, 24 | indirection_table, 25 | } 26 | } 27 | 28 | pub fn add_storage serde::Deserialize<'a> + 'static>(&self) { 29 | let mut storages = self.storage.borrow_mut(); 30 | storages.insert( 31 | AssetTypeId(T::UUID), 32 | Box::new(Storage::::new( 33 | self.refop_sender.clone(), 34 | self.indirection_table.clone(), 35 | )), 36 | ); 37 | } 38 | } 39 | 40 | struct AssetState { 41 | version: u32, 
42 | asset: A, 43 | } 44 | pub struct Storage { 45 | refop_sender: Arc>, 46 | assets: HashMap>, 47 | uncommitted: HashMap>, 48 | indirection_table: IndirectionTable, 49 | } 50 | impl Storage { 51 | fn new(sender: Arc>, indirection_table: IndirectionTable) -> Self { 52 | Self { 53 | refop_sender: sender, 54 | assets: HashMap::new(), 55 | uncommitted: HashMap::new(), 56 | indirection_table, 57 | } 58 | } 59 | 60 | fn get(&self, handle: &T) -> Option<&A> { 61 | let handle = if handle.load_handle().is_indirect() { 62 | self.indirection_table.resolve(handle.load_handle())? 63 | } else { 64 | handle.load_handle() 65 | }; 66 | self.assets.get(&handle).map(|a| &a.asset) 67 | } 68 | 69 | fn get_version(&self, handle: &T) -> Option { 70 | let handle = if handle.load_handle().is_indirect() { 71 | self.indirection_table.resolve(handle.load_handle())? 72 | } else { 73 | handle.load_handle() 74 | }; 75 | self.assets.get(&handle).map(|a| a.version) 76 | } 77 | 78 | fn get_asset_with_version(&self, handle: &T) -> Option<(&A, u32)> { 79 | let handle = if handle.load_handle().is_indirect() { 80 | self.indirection_table.resolve(handle.load_handle())? 
81 | } else { 82 | handle.load_handle() 83 | }; 84 | self.assets.get(&handle).map(|a| (&a.asset, a.version)) 85 | } 86 | } 87 | impl serde::Deserialize<'a> + 'static> TypedAssetStorage 88 | for GenericAssetStorage 89 | { 90 | fn get(&self, handle: &T) -> Option<&A> { 91 | // This transmute can probably be unsound, but I don't have the energy to fix it right now 92 | unsafe { 93 | std::mem::transmute( 94 | self.storage 95 | .borrow() 96 | .get(&AssetTypeId(A::UUID)) 97 | .expect("unknown asset type") 98 | .as_ref() 99 | .any() 100 | .downcast_ref::>() 101 | .expect("failed to downcast") 102 | .get(handle), 103 | ) 104 | } 105 | } 106 | 107 | fn get_version(&self, handle: &T) -> Option { 108 | self.storage 109 | .borrow() 110 | .get(&AssetTypeId(A::UUID)) 111 | .expect("unknown asset type") 112 | .as_ref() 113 | .any() 114 | .downcast_ref::>() 115 | .expect("failed to downcast") 116 | .get_version(handle) 117 | } 118 | 119 | fn get_asset_with_version(&self, handle: &T) -> Option<(&A, u32)> { 120 | // This transmute can probably be unsound, but I don't have the energy to fix it right now 121 | unsafe { 122 | std::mem::transmute( 123 | self.storage 124 | .borrow() 125 | .get(&AssetTypeId(A::UUID)) 126 | .expect("unknown asset type") 127 | .as_ref() 128 | .any() 129 | .downcast_ref::>() 130 | .expect("failed to downcast") 131 | .get_asset_with_version(handle), 132 | ) 133 | } 134 | } 135 | } 136 | pub trait TypedStorage: Any { 137 | fn any(&self) -> &dyn Any; 138 | fn update_asset( 139 | &mut self, 140 | loader_info: &dyn LoaderInfoProvider, 141 | data: Vec, 142 | load_handle: LoadHandle, 143 | load_op: AssetLoadOp, 144 | version: u32, 145 | ) -> Result<(), Box>; 146 | fn commit_asset_version(&mut self, handle: LoadHandle, version: u32); 147 | fn free(&mut self, handle: LoadHandle, version: u32); 148 | } 149 | 150 | impl serde::Deserialize<'a> + 'static + TypeUuid> TypedStorage for Storage { 151 | fn any(&self) -> &dyn Any { 152 | self 153 | } 154 | 155 | fn 
update_asset( 156 | &mut self, 157 | loader_info: &dyn LoaderInfoProvider, 158 | data: Vec, 159 | load_handle: LoadHandle, 160 | load_op: AssetLoadOp, 161 | version: u32, 162 | ) -> Result<(), Box> { 163 | // To enable automatic serde of Handle, we need to set up a SerdeContext with a RefOp sender 164 | let asset = futures_executor::block_on(distill::loader::handle::SerdeContext::with( 165 | loader_info, 166 | (*self.refop_sender).clone(), 167 | async { bincode::deserialize::(&data) }, 168 | )) 169 | .expect("failed to deserialize asset"); 170 | self.uncommitted 171 | .insert(load_handle, AssetState { version, asset }); 172 | log::info!("{} bytes loaded for {:?}", data.len(), load_handle); 173 | // The loading process could be async, in which case you can delay 174 | // calling `load_op.complete` as it should only be done when the asset is usable. 175 | load_op.complete(); 176 | Ok(()) 177 | } 178 | 179 | fn commit_asset_version(&mut self, load_handle: LoadHandle, _version: u32) { 180 | // The commit step is done after an asset load has completed. 181 | // It exists to avoid frames where an asset that was loaded is unloaded, which 182 | // could happen when hot reloading. To support this case, you must support having multiple 183 | // versions of an asset loaded at the same time. 
184 | self.assets.insert( 185 | load_handle, 186 | self.uncommitted 187 | .remove(&load_handle) 188 | .expect("asset not present when committing"), 189 | ); 190 | log::info!("Commit {:?}", load_handle); 191 | } 192 | 193 | fn free(&mut self, load_handle: LoadHandle, version: u32) { 194 | if let Some(asset) = self.uncommitted.get(&load_handle) { 195 | if asset.version == version { 196 | self.uncommitted.remove(&load_handle); 197 | } 198 | } 199 | if let Some(asset) = self.assets.get(&load_handle) { 200 | if asset.version == version { 201 | self.assets.remove(&load_handle); 202 | } 203 | } 204 | log::info!("Free {:?}", load_handle); 205 | } 206 | } 207 | 208 | // Untyped implementation of AssetStorage that finds the asset_type's storage and forwards the call 209 | impl AssetStorage for GenericAssetStorage { 210 | fn update_asset( 211 | &mut self, 212 | loader_info: &dyn LoaderInfoProvider, 213 | asset_type_id: &AssetTypeId, 214 | data: Vec, 215 | load_handle: LoadHandle, 216 | load_op: AssetLoadOp, 217 | version: u32, 218 | ) -> Result<(), Box> { 219 | self.storage 220 | .borrow_mut() 221 | .get_mut(asset_type_id) 222 | .expect("unknown asset type") 223 | .update_asset(loader_info, data, load_handle, load_op, version) 224 | } 225 | 226 | fn commit_asset_version( 227 | &mut self, 228 | asset_type: &AssetTypeId, 229 | load_handle: LoadHandle, 230 | version: u32, 231 | ) { 232 | self.storage 233 | .borrow_mut() 234 | .get_mut(asset_type) 235 | .expect("unknown asset type") 236 | .commit_asset_version(load_handle, version) 237 | } 238 | 239 | fn free(&mut self, asset_type_id: &AssetTypeId, load_handle: LoadHandle, version: u32) { 240 | self.storage 241 | .borrow_mut() 242 | .get_mut(asset_type_id) 243 | .expect("unknown asset type") 244 | .free(load_handle, version) 245 | } 246 | } 247 | -------------------------------------------------------------------------------- /importer/Cargo.toml: -------------------------------------------------------------------------------- 1 
| [package] 2 | name = "distill-importer" 3 | version = "0.0.3" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | license = "MIT OR Apache-2.0" 7 | description = "Importer component of the asset pipeline `distill`." 8 | 9 | [dependencies] 10 | distill-core = { path = "../core", version = "=0.0.3", features = ["serde-1"] } 11 | distill-serde-importable-derive = { path = "./serde-importable-derive", version = "=0.0.3", optional = true } 12 | 13 | uuid = { version = "0.8.2", features = ["v4", "wasm-bindgen"] } 14 | serde = "1" 15 | erased-serde = "0.3" 16 | ron = { version = "0.6.4", optional = true } 17 | typetag = { version = "0.1", optional = true } 18 | futures = "0.3" 19 | log = { version = "0.4", features = ["serde"] } 20 | 21 | [features] 22 | serde_importers = [ 23 | "typetag", 24 | "distill-serde-importable-derive", 25 | "ron", 26 | "distill-core/type_uuid", 27 | ] -------------------------------------------------------------------------------- /importer/serde-importable-derive/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "distill-serde-importable-derive" 3 | version = "0.0.3" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | license = "MIT OR Apache-2.0" 7 | description = "Proc macro for SerdeImportables in `distill`." 
8 | 9 | [lib] 10 | proc-macro = true 11 | 12 | [dependencies] 13 | syn = { version = "1.0.58", features = ["derive"] } 14 | quote = "1.0.8" 15 | -------------------------------------------------------------------------------- /importer/serde-importable-derive/src/lib.rs: -------------------------------------------------------------------------------- 1 | extern crate proc_macro; 2 | 3 | use quote::quote; 4 | use syn::*; 5 | 6 | #[proc_macro_derive(SerdeImportable, attributes(uuid))] 7 | pub fn serde_importable_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { 8 | // Construct a representation of Rust code as a syntax tree 9 | // that we can manipulate 10 | let ast: DeriveInput = syn::parse(input).unwrap(); 11 | 12 | // Build the trait implementation 13 | let name = &ast.ident; 14 | 15 | let mut uuid = None; 16 | for attribute in ast.attrs.iter().filter_map(|attr| attr.parse_meta().ok()) { 17 | let name_value = if let Meta::NameValue(name_value) = attribute { 18 | name_value 19 | } else { 20 | continue; 21 | }; 22 | 23 | if name_value.path.get_ident().unwrap() != "uuid" { 24 | continue; 25 | } 26 | 27 | let uuid_str = match name_value.lit { 28 | Lit::Str(lit_str) => lit_str, 29 | _ => panic!("uuid attribute must take the form `#[uuid = \"xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx\"`"), 30 | }; 31 | 32 | uuid = Some(uuid_str); 33 | } 34 | 35 | let uuid = 36 | uuid.expect("No `#[uuid = \"xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx\"` attribute found"); 37 | let gen = quote! 
{ 38 | #[distill_importer::typetag::serde(name = #uuid)] 39 | impl distill_importer::SerdeImportable for #name { 40 | } 41 | }; 42 | gen.into() 43 | } 44 | -------------------------------------------------------------------------------- /importer/src/boxed_importer.rs: -------------------------------------------------------------------------------- 1 | use distill_core::TypeUuidDynamic; 2 | use erased_serde::Deserializer; 3 | use futures::{future::BoxFuture, AsyncRead, AsyncWrite}; 4 | use serde::{Deserialize, Serialize}; 5 | 6 | use crate::{ 7 | error::Result, AsyncImporter, ExportAsset, ImportOp, ImportSource, ImporterValue, IntoSerdeObj, 8 | SerdeObj, 9 | }; 10 | 11 | /// Version of the SourceMetadata struct. 12 | /// Used for forward compatibility to enable changing the .meta file format 13 | pub const SOURCEMETADATA_VERSION: u32 = 2; 14 | 15 | /// SourceMetadata is the in-memory representation of the .meta file for a (source, .meta) pair. 16 | #[derive(Serialize, Deserialize)] 17 | pub struct SourceMetadata { 18 | /// Metadata struct version 19 | pub version: u32, 20 | /// The [`crate::Importer::Options`] used to import the source file. 21 | pub importer_options: Options, 22 | /// The [`crate::Importer::State`] generated when importing the source file. 23 | pub importer_state: State, 24 | } 25 | 26 | /// Trait object wrapper for [`crate::Importer`] implementations. 27 | /// Enables using Importers without knowing the concrete type. 28 | /// See [`crate::Importer`] for documentation on fields. 
29 | pub trait BoxedImporter: TypeUuidDynamic + Send + Sync + 'static { 30 | fn import_boxed<'a>( 31 | &'a self, 32 | op: &'a mut ImportOp, 33 | source: &'a mut (dyn AsyncRead + Unpin + Send + Sync), 34 | options: Box, 35 | state: Box, 36 | ) -> BoxFuture<'a, Result>; 37 | fn export_boxed<'a>( 38 | &'a self, 39 | output: &'a mut (dyn AsyncWrite + Unpin + Send + Sync), 40 | options: Box, 41 | state: Box, 42 | assets: Vec, 43 | ) -> BoxFuture<'a, Result>; 44 | fn default_options_boxed(&self, import_source: ImportSource<'_>) -> Box; 45 | fn options_type_uuid(&self) -> [u8; 16]; 46 | fn default_state(&self) -> Box; 47 | fn version(&self) -> u32; 48 | fn deserialize_metadata( 49 | &self, 50 | deserializer: &mut dyn Deserializer<'_>, 51 | ) -> Result, Box>>; 52 | fn deserialize_options( 53 | &self, 54 | deserializer: &mut dyn Deserializer<'_>, 55 | ) -> Result>; 56 | fn deserialize_state( 57 | &self, 58 | deserializer: &mut dyn Deserializer<'_>, 59 | ) -> Result>; 60 | } 61 | 62 | impl std::fmt::Debug for dyn BoxedImporter { 63 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 64 | f.debug_tuple("BoxedImporter").field(&self.uuid()).finish() 65 | } 66 | } 67 | 68 | /// Trait object wrapper for [ImporterValue] implementations. 69 | /// See [ImporterValue] for documentation on fields. 
70 | pub struct BoxedImporterValue { 71 | pub value: ImporterValue, 72 | pub options: Box, 73 | pub state: Box, 74 | } 75 | 76 | /// Return value for BoxedImporter::export_boxed 77 | pub struct BoxedExportInputs { 78 | pub options: Box, 79 | pub state: Box, 80 | pub value: ImporterValue, 81 | } 82 | 83 | impl BoxedImporter for T 84 | where 85 | O: SerdeObj + Serialize + Default + Send + Sync + Clone + for<'a> Deserialize<'a>, 86 | S: SerdeObj + Serialize + Default + Send + Sync + for<'a> Deserialize<'a>, 87 | T: AsyncImporter + TypeUuidDynamic + Send + Sync, 88 | { 89 | fn import_boxed<'a>( 90 | &'a self, 91 | op: &'a mut ImportOp, 92 | source: &'a mut (dyn AsyncRead + Unpin + Send + Sync), 93 | options: Box, 94 | mut state: Box, 95 | ) -> BoxFuture<'a, Result> { 96 | log::trace!("import_boxed"); 97 | Box::pin(async move { 98 | let s = state.any_mut().downcast_mut::(); 99 | let s = if let Some(s) = s { 100 | s 101 | } else { 102 | panic!("Failed to downcast Importer::State"); 103 | }; 104 | let o = options.any().downcast_ref::(); 105 | let o = if let Some(o) = o { 106 | o 107 | } else { 108 | panic!("Failed to downcast Importer::Options"); 109 | }; 110 | 111 | log::trace!("import_boxed about to import"); 112 | let result = self.import(op, source, o, s).await?; 113 | log::trace!("import_boxed imported"); 114 | Ok(BoxedImporterValue { 115 | value: result, 116 | options, 117 | state, 118 | }) 119 | }) 120 | } 121 | 122 | fn export_boxed<'a>( 123 | &'a self, 124 | output: &'a mut (dyn AsyncWrite + Unpin + Send + Sync), 125 | options: Box, 126 | mut state: Box, 127 | assets: Vec, 128 | ) -> BoxFuture<'a, Result> { 129 | Box::pin(async move { 130 | let s = state.any_mut().downcast_mut::(); 131 | let s = if let Some(s) = s { 132 | s 133 | } else { 134 | panic!("Failed to downcast Importer::State"); 135 | }; 136 | let o = options.any().downcast_ref::(); 137 | let o = if let Some(o) = o { 138 | o 139 | } else { 140 | panic!("Failed to downcast Importer::Options"); 141 | }; 
142 | 143 | let result = self.export(output, o, s, assets).await?; 144 | Ok(BoxedExportInputs { 145 | options, 146 | state, 147 | value: result, 148 | }) 149 | }) 150 | } 151 | 152 | fn default_options_boxed(&self, import_source: ImportSource<'_>) -> Box { 153 | self.default_options(import_source) 154 | .map(|x| Box::new(x).into_serde_obj()) 155 | .unwrap_or_else(|| Box::new(O::default())) 156 | } 157 | 158 | fn options_type_uuid(&self) -> [u8; 16] { 159 | O::default().uuid() 160 | } 161 | 162 | fn default_state(&self) -> Box { 163 | Box::new(S::default()) 164 | } 165 | 166 | fn version(&self) -> u32 { 167 | T::version(self) 168 | } 169 | 170 | fn deserialize_metadata<'a>( 171 | &self, 172 | deserializer: &mut dyn Deserializer<'_>, 173 | ) -> Result, Box>> { 174 | let metadata = erased_serde::deserialize::>(deserializer)?; 175 | Ok(SourceMetadata { 176 | version: metadata.version, 177 | importer_options: Box::new(metadata.importer_options), 178 | importer_state: Box::new(metadata.importer_state), 179 | }) 180 | } 181 | 182 | fn deserialize_options<'a>( 183 | &self, 184 | deserializer: &mut dyn Deserializer<'_>, 185 | ) -> Result> { 186 | Ok(Box::new(erased_serde::deserialize::(deserializer)?)) 187 | } 188 | 189 | fn deserialize_state<'a>( 190 | &self, 191 | deserializer: &mut dyn Deserializer<'_>, 192 | ) -> Result> { 193 | Ok(Box::new(erased_serde::deserialize::(deserializer)?)) 194 | } 195 | } 196 | -------------------------------------------------------------------------------- /importer/src/error.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | 3 | pub type Result = std::result::Result; 4 | 5 | #[derive(Debug)] 6 | #[non_exhaustive] 7 | pub enum Error { 8 | IoError(std::io::Error), 9 | Uuid(uuid::Error), 10 | ErasedSerde(erased_serde::Error), 11 | #[cfg(feature = "serde_importers")] 12 | RonDe(ron::de::Error), 13 | Boxed(Box), 14 | ExportUnsupported, 15 | Custom(String), 16 | } 17 | 18 | impl 
std::error::Error for Error { 19 | fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { 20 | match *self { 21 | Error::IoError(ref e) => Some(e), 22 | Error::Uuid(ref e) => Some(e), 23 | Error::ErasedSerde(ref e) => Some(e), 24 | #[cfg(feature = "serde_importers")] 25 | Error::RonDe(ref e) => Some(e), 26 | Error::Boxed(ref e) => e.source(), 27 | Error::ExportUnsupported => None, 28 | Error::Custom(_) => None, 29 | } 30 | } 31 | } 32 | 33 | impl fmt::Display for Error { 34 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 35 | match *self { 36 | Error::Uuid(ref e) => e.fmt(f), 37 | Error::IoError(ref e) => e.fmt(f), 38 | Error::ErasedSerde(ref e) => e.fmt(f), 39 | #[cfg(feature = "serde_importers")] 40 | Error::RonDe(ref e) => e.fmt(f), 41 | Error::Boxed(ref e) => e.fmt(f), 42 | Error::ExportUnsupported => write!(f, "{:?}", self), 43 | Error::Custom(ref e) => write!(f, "{}", e), 44 | } 45 | } 46 | } 47 | 48 | impl From for Error { 49 | fn from(err: uuid::Error) -> Error { 50 | Error::Uuid(err) 51 | } 52 | } 53 | 54 | impl From for Error { 55 | fn from(err: std::io::Error) -> Error { 56 | Error::IoError(err) 57 | } 58 | } 59 | impl From for Error { 60 | fn from(err: erased_serde::Error) -> Error { 61 | Error::ErasedSerde(err) 62 | } 63 | } 64 | 65 | #[cfg(feature = "serde_importers")] 66 | impl From for Error { 67 | fn from(err: ron::de::Error) -> Error { 68 | Error::RonDe(err) 69 | } 70 | } 71 | 72 | impl From> for Error { 73 | fn from(err: Box) -> Error { 74 | Error::Boxed(err) 75 | } 76 | } 77 | 78 | impl From for Error { 79 | fn from(err: String) -> Error { 80 | Error::Custom(err) 81 | } 82 | } 83 | 84 | impl From<&str> for Error { 85 | fn from(err: &str) -> Error { 86 | Error::Custom(err.into()) 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /importer/src/ron_importer.rs: -------------------------------------------------------------------------------- 1 | use std::io::Read; 2 | 3 | use 
distill_core::{type_uuid, type_uuid::TypeUuid, AssetUuid}; 4 | use ron::de::from_reader; 5 | use serde::{Deserialize, Serialize}; 6 | 7 | use crate::{ImportOp, ImportedAsset, Importer, ImporterValue, Result, SerdeImportable}; 8 | 9 | #[derive(Default, Deserialize, Serialize, TypeUuid, Clone, Copy)] 10 | #[uuid = "f3cd048a-2c98-4e4b-95a2-d7c0ee6f7beb"] 11 | pub struct RonImporterOptions {} 12 | 13 | /// A simple state for Importer to retain the same UUID between imports 14 | /// for all single-asset source files 15 | #[derive(Default, Deserialize, Serialize, TypeUuid)] 16 | #[uuid = "fabe2809-dcc0-4463-b741-a456ca6b28ed"] 17 | pub struct RonImporterState { 18 | pub id: Option, 19 | } 20 | 21 | #[derive(Default, TypeUuid)] 22 | #[uuid = "162ede20-6fdd-44c1-8387-8f93983c067c"] 23 | pub struct RonImporter; 24 | 25 | impl Importer for RonImporter { 26 | type Options = RonImporterOptions; 27 | type State = RonImporterState; 28 | 29 | fn version_static() -> u32 { 30 | 1 31 | } 32 | 33 | fn version(&self) -> u32 { 34 | Self::version_static() 35 | } 36 | 37 | fn import( 38 | &self, 39 | _op: &mut ImportOp, 40 | source: &mut dyn Read, 41 | _: &Self::Options, 42 | state: &mut Self::State, 43 | ) -> Result { 44 | if state.id.is_none() { 45 | state.id = Some(AssetUuid(*uuid::Uuid::new_v4().as_bytes())); 46 | } 47 | let de: Box = from_reader(source)?; 48 | 49 | Ok(ImporterValue { 50 | assets: vec![ImportedAsset { 51 | id: state.id.expect("AssetUuid not generated"), 52 | search_tags: Vec::new(), 53 | build_deps: Vec::new(), 54 | load_deps: Vec::new(), 55 | asset_data: de.into_serde_obj(), 56 | build_pipeline: None, 57 | }], 58 | }) 59 | } 60 | } 61 | #[cfg(test)] 62 | mod tests { 63 | use std::collections::HashMap; 64 | 65 | use super::*; 66 | use crate as distill_importer; 67 | use crate::*; 68 | 69 | #[derive(Serialize, Deserialize, TypeUuid, SerdeImportable, PartialEq, Eq)] 70 | #[uuid = "36fb2083-7195-4583-8af9-0965f10ae60d"] 71 | struct A { 72 | x: u32, 73 | } 74 | 75 | 
#[derive(Serialize, Deserialize, TypeUuid, SerdeImportable, PartialEq)] 76 | #[uuid = "d4b83227-d3f8-47f5-b026-db615fb41d31"] 77 | struct B { 78 | s: String, 79 | a: A, 80 | m: HashMap, 81 | } 82 | 83 | #[test] 84 | fn ron_importer_simple_test() { 85 | let importer: Box = Box::new(RonImporter::default()); 86 | 87 | let mut a = "{ 88 | \"36fb2083-7195-4583-8af9-0965f10ae60d\": 89 | ( 90 | x: 30, 91 | ) 92 | }" 93 | .as_bytes(); 94 | 95 | let mut import_op = ImportOp::default(); 96 | let a_boxed_res = futures::executor::block_on(importer.import_boxed( 97 | &mut import_op, 98 | &mut a, 99 | Box::new(RonImporterOptions {}), 100 | Box::new(RonImporterState { id: None }), 101 | )) 102 | .unwrap(); 103 | let a_serde_obj = a_boxed_res 104 | .value 105 | .assets 106 | .into_iter() 107 | .next() 108 | .unwrap() 109 | .asset_data; 110 | 111 | let a_downcast = a_serde_obj.any().downcast_ref::(); 112 | match a_downcast { 113 | Some(a) => assert_eq!(a.x, 30), 114 | None => panic!("Expected serde_obj to be downcast to `A`."), 115 | } 116 | } 117 | 118 | #[test] 119 | fn ron_importer_complex_test() { 120 | let importer: Box = Box::new(RonImporter::default()); 121 | 122 | let mut b = "{ 123 | \"d4b83227-d3f8-47f5-b026-db615fb41d31\": 124 | ( 125 | s: \"Ferris\", 126 | a: ( 127 | x: 30 128 | ), 129 | m: { 130 | \"lorem\": \"ipsum\", 131 | \"dolor\": \"sim\", 132 | } 133 | ) 134 | }" 135 | .as_bytes(); 136 | 137 | let mut op = ImportOp::default(); 138 | let b_boxed_res = futures::executor::block_on(importer.import_boxed( 139 | &mut op, 140 | &mut b, 141 | Box::new(RonImporterOptions {}), 142 | Box::new(RonImporterState { id: None }), 143 | )) 144 | .unwrap(); 145 | let b_serde_obj = b_boxed_res 146 | .value 147 | .assets 148 | .into_iter() 149 | .next() 150 | .unwrap() 151 | .asset_data; 152 | let b_downcast = b_serde_obj.any().downcast_ref::(); 153 | match b_downcast { 154 | Some(b) => { 155 | assert_eq!(b.s, "Ferris"); 156 | assert_eq!(b.a.x, 30); 157 | assert_eq!(b.m["lorem"], 
"ipsum");
                assert_eq!(b.m["dolor"], "sim");
                assert_eq!(b.m.len(), 2);
            }
            None => panic!("Expected serde_obj to be downcast to `B`."),
        }
    }
}
--------------------------------------------------------------------------------
/importer/src/serde_obj.rs:
--------------------------------------------------------------------------------
use std::any::Any;

use distill_core::TypeUuidDynamic;
use erased_serde::*;

/// A trait for serializing any struct with a TypeUuid
pub trait SerdeObj: Any + Serialize + TypeUuidDynamic + Send {
    /// Borrow `self` as `&dyn Any` for downcasting to the concrete type.
    fn any(&self) -> &dyn Any;
    /// Borrow `self` as `&mut dyn Any` for mutable downcasting.
    fn any_mut(&mut self) -> &mut dyn Any;
}

// Blanket impl: any type satisfying the supertrait bounds is a SerdeObj.
impl<T: Any + Serialize + TypeUuidDynamic + Send> SerdeObj for T {
    fn any(&self) -> &dyn Any {
        self
    }

    fn any_mut(&mut self) -> &mut dyn Any {
        self
    }
}

/// Conversion from a concrete boxed value into a boxed [`SerdeObj`] trait object.
pub trait IntoSerdeObj {
    fn into_serde_obj(self: Box<Self>) -> Box<dyn SerdeObj>
    where
        Self: 'static;
}

impl<T: SerdeObj> IntoSerdeObj for T {
    fn into_serde_obj(self: Box<Self>) -> Box<dyn SerdeObj>
    where
        Self: 'static,
    {
        self
    }
}

#[cfg(feature = "serde_importers")]
#[typetag::serde]
pub trait SerdeImportable: SerdeObj + IntoSerdeObj {}

#[cfg(feature = "serde_importers")]
#[doc(hidden)]
pub use distill_serde_importable_derive::*;
#[doc(hidden)]
#[cfg(feature = "serde_importers")]
pub use typetag;

serialize_trait_object!(SerdeObj);
--------------------------------------------------------------------------------
/importer/src/serialized_asset.rs:
--------------------------------------------------------------------------------
use distill_core::ArtifactMetadata;

/// An artifact's metadata paired with its serialized data, which may be
/// owned (`Vec<u8>`) or borrowed (`&[u8]`).
pub struct SerializedAsset<T: AsRef<[u8]>> {
    pub metadata: ArtifactMetadata,
    pub data: T,
}

impl<'a> SerializedAsset<&'a [u8]> {
    /// Clone the borrowed data into an owned `Vec<u8>` copy of this asset.
    pub fn to_vec(&self) -> SerializedAsset<Vec<u8>> {
        SerializedAsset {
            metadata: self.metadata.clone(),
            data: self.data.to_vec(),
        }
    }
}
| -------------------------------------------------------------------------------- /loader/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "distill-loader" 3 | version = "0.0.3" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | license = "MIT OR Apache-2.0" 7 | description = "Loader component of the asset pipeline `distill`." 8 | 9 | [dependencies] 10 | distill-core = { path = "../core", version = "=0.0.3", features = ["serde-1"] } 11 | distill-schema = { path = "../schema", version = "=0.0.3", optional = true } 12 | 13 | crossbeam-channel = "0.5.0" 14 | futures-util = { version = "0.3", default-features = false, features = ["io"], optional = true } 15 | futures-channel = { version = "0.3", default-features = false, features = ["alloc"] } 16 | futures-core = { version = "0.3", default-features = false, features = ["alloc"] } 17 | capnp = { version = "0.14.0", features = ["unaligned"], optional = true } 18 | capnp-rpc = { version = "0.14.0", optional = true } 19 | log = "0.4" 20 | dashmap = "4.0.1" 21 | serde = { version = "1", features = ["derive"], optional = true } 22 | uuid = { version = "0.8.2", optional = true } 23 | thread_local = { version = "1.0", optional = true } 24 | async-executor = { version = "1.4.1", optional = true } 25 | bevy_tasks = { version = "0.5.0", optional = true } 26 | pin-project-lite = "0.2.6" 27 | 28 | [target.'cfg(not(target_arch = "wasm32"))'.dependencies] 29 | memmap = { version = "0.7", optional = true } 30 | async-net = { version = "1.6.0", optional = true } 31 | instant = { version = "0.1" } 32 | 33 | [target.'cfg(target_arch = "wasm32")'.dependencies] 34 | websocket-async-io = { version = "1.0", optional = true } 35 | instant = { version = "0.1", features = ["wasm-bindgen"] } 36 | 37 | [features] 38 | default = [] 39 | packfile_io = [ 40 | "distill-schema", 41 | "capnp", 42 | "capnp-rpc", 43 | "futures-util", 44 | "memmap", 45 | "thread_local", 46 | 
"async-executor", 47 | "bevy_tasks", 48 | "invalidate_path" 49 | ] 50 | rpc_io = [ 51 | "distill-schema", 52 | "capnp", 53 | "capnp-rpc", 54 | "futures-util", 55 | "invalidate_path", 56 | "async-executor", 57 | "async-net", 58 | "websocket-async-io" 59 | ] 60 | invalidate_path = ["distill-core/path_utils"] 61 | handle = ["serde", "uuid"] 62 | -------------------------------------------------------------------------------- /loader/src/io.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, path::PathBuf}; 2 | 3 | use crossbeam_channel::Sender; 4 | use distill_core::{ArtifactId, ArtifactMetadata, AssetMetadata, AssetUuid}; 5 | 6 | use crate::{loader::LoaderState, storage::IndirectIdentifier, LoadHandle, Result}; 7 | 8 | /// Provides [`Loader`](crate::loader::Loader) with data. 9 | pub trait LoaderIO: Send + Sync { 10 | fn get_asset_metadata_with_dependencies(&mut self, request: MetadataRequest); 11 | fn get_asset_candidates(&mut self, requests: Vec); 12 | fn get_artifacts(&mut self, requests: Vec); 13 | fn tick(&mut self, loader: &mut LoaderState); 14 | } 15 | 16 | /// A request for an asset artifact's data. 
17 | pub struct DataRequest { 18 | pub(crate) tx: Sender<(Result>, LoadHandle, u32)>, 19 | pub(crate) asset_id: AssetUuid, 20 | pub(crate) artifact_id: ArtifactId, 21 | pub(crate) request_data: Option<(LoadHandle, u32)>, 22 | } 23 | impl DataRequest { 24 | pub fn asset_id(&self) -> AssetUuid { 25 | self.asset_id 26 | } 27 | 28 | pub fn artifact_id(&self) -> ArtifactId { 29 | self.artifact_id 30 | } 31 | 32 | pub fn error(mut self, err: T) { 33 | if let Some(request_data) = self.request_data.take() { 34 | let _ = self 35 | .tx 36 | .send((Err(Box::new(err)), request_data.0, request_data.1)); 37 | } 38 | } 39 | 40 | pub fn complete(mut self, data: Vec) { 41 | if let Some(request_data) = self.request_data.take() { 42 | let _ = self.tx.send((Ok(data), request_data.0, request_data.1)); 43 | } 44 | } 45 | } 46 | impl Drop for DataRequest { 47 | fn drop(&mut self) { 48 | if let Some(request_data) = self.request_data.take() { 49 | let _ = self.tx.send(( 50 | Err(Box::new(RequestDropError)), 51 | request_data.0, 52 | request_data.1, 53 | )); 54 | } 55 | } 56 | } 57 | 58 | /// A request for possible candidates for an [`IndirectIdentifier`]. 
59 | #[allow(clippy::type_complexity)] 60 | pub struct ResolveRequest { 61 | pub(crate) tx: Sender<( 62 | Result)>>, 63 | IndirectIdentifier, 64 | LoadHandle, 65 | )>, 66 | pub(crate) id: Option<(IndirectIdentifier, LoadHandle)>, 67 | } 68 | impl ResolveRequest { 69 | pub fn identifier(&self) -> &IndirectIdentifier { 70 | self.id.as_ref().map(|v| &v.0).unwrap() 71 | } 72 | 73 | pub fn error(mut self, err: T) { 74 | if let Some(id) = self.id.take() { 75 | let _ = self.tx.send((Err(Box::new(err)), id.0, id.1)); 76 | } 77 | } 78 | 79 | pub fn complete(mut self, data: Vec<(PathBuf, Vec)>) { 80 | if let Some(id) = self.id.take() { 81 | let _ = self.tx.send((Ok(data), id.0, id.1)); 82 | } 83 | } 84 | } 85 | impl Drop for ResolveRequest { 86 | fn drop(&mut self) { 87 | if let Some(id) = self.id.take() { 88 | let _ = self.tx.send((Err(Box::new(RequestDropError)), id.0, id.1)); 89 | } 90 | } 91 | } 92 | #[derive(Debug)] 93 | struct RequestDropError; 94 | impl std::fmt::Display for RequestDropError { 95 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 96 | f.write_str("request dropped") 97 | } 98 | } 99 | impl std::error::Error for RequestDropError {} 100 | 101 | pub struct MetadataRequestResult { 102 | pub artifact_metadata: ArtifactMetadata, 103 | pub asset_metadata: Option, 104 | } 105 | /// A request for artifact metadata covering the dependency graphs of the requested asset IDs. 106 | #[allow(clippy::type_complexity)] 107 | pub struct MetadataRequest { 108 | pub(crate) tx: Sender<( 109 | Result>, 110 | HashMap, 111 | )>, 112 | pub(crate) requests: Option>, 113 | pub(crate) include_asset_metadata: bool, 114 | } 115 | impl MetadataRequest { 116 | pub fn requested_assets(&self) -> impl Iterator { 117 | self.requests.as_ref().unwrap().keys() 118 | } 119 | 120 | /// Whether the response should include asset metadata or not, for debugging purposes. 
121 | pub fn include_asset_metadata(&self) -> bool { 122 | self.include_asset_metadata 123 | } 124 | 125 | pub fn error(mut self, err: T) { 126 | if let Some(requests) = self.requests.take() { 127 | let _ = self.tx.send((Err(Box::new(err)), requests)); 128 | } 129 | } 130 | 131 | pub fn complete(mut self, metadata: Vec) { 132 | if let Some(requests) = self.requests.take() { 133 | let _ = self.tx.send((Ok(metadata), requests)); 134 | } 135 | } 136 | } 137 | 138 | impl Drop for MetadataRequest { 139 | fn drop(&mut self) { 140 | if let Some(requests) = self.requests.take() { 141 | let _ = self.tx.send((Err(Box::new(RequestDropError)), requests)); 142 | } 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /loader/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![deny( 2 | rust_2018_compatibility, 3 | rust_2018_idioms, 4 | unused, 5 | unused_extern_crates, 6 | future_incompatible, 7 | nonstandard_style 8 | )] 9 | 10 | /// *feature:* `handle`. Handles provide automatic reference counting of assets, similar to [Rc](`std::rc::Rc`). 11 | #[cfg(feature = "handle")] 12 | pub mod handle; 13 | /// [`LoaderIO`](crate::io::LoaderIO) provides data requested by [`Loader`](crate::loader::Loader). 14 | pub mod io; 15 | /// [`Loader`] loads assets into engine-implemented [`AssetStorage`](crate::storage::AssetStorage)s. 16 | pub mod loader; 17 | #[cfg(feature = "packfile_io")] 18 | pub mod packfile_io; 19 | /// *feature:* `rpc_io`. `RpcIO` is an implementation of [`LoaderIO`](crate::io::LoaderIO) which communicates with `distill_daemon` 20 | /// to load and hot reload assets. Intended for development workflows. 21 | #[cfg(feature = "rpc_io")] 22 | pub mod rpc_io; 23 | /// [`AssetStorage`](crate::storage::AssetStorage) is implemented by engines to store loaded asset data. 
24 | pub mod storage; 25 | 26 | mod task_local; 27 | 28 | pub use crossbeam_channel; 29 | pub use distill_core::{AssetRef, AssetTypeId, AssetUuid}; 30 | pub use loader::Loader; 31 | #[cfg(feature = "packfile_io")] 32 | pub use packfile_io::PackfileReader; 33 | #[cfg(feature = "rpc_io")] 34 | pub use rpc_io::RpcIO; 35 | pub use storage::LoadHandle; 36 | 37 | pub type Result = std::result::Result>; 38 | 39 | #[cfg(feature = "handle")] 40 | #[macro_export] 41 | macro_rules! if_handle_enabled { 42 | ($($tt:tt)*) => { 43 | $($tt)* 44 | }; 45 | } 46 | 47 | #[cfg(not(feature = "handle"))] 48 | #[macro_export] 49 | #[doc(hidden)] 50 | macro_rules! if_handle_enabled { 51 | ($($tt:tt)*) => {}; 52 | } 53 | -------------------------------------------------------------------------------- /loader/src/task_local.rs: -------------------------------------------------------------------------------- 1 | use pin_project_lite::pin_project; 2 | use std::cell::RefCell; 3 | use std::error::Error; 4 | use std::future::Future; 5 | use std::pin::Pin; 6 | use std::task::{Context, Poll}; 7 | use std::{fmt, thread}; 8 | 9 | // Reproduced from `tokio` (under MIT license) at https://github.com/tokio-rs/tokio/blob/9a3603fa75ff854e007d372061edf47cf8d02690/tokio/src/task/task_local.rs. 10 | 11 | /// Declares a new task-local key. 12 | /// 13 | /// # Syntax 14 | /// 15 | /// The macro wraps any number of static declarations and makes them local to the current task. 16 | /// Publicity and attributes for each static is preserved. For example: 17 | /// 18 | #[macro_export] 19 | macro_rules! 
task_local { 20 | // empty (base case for the recursion) 21 | () => {}; 22 | 23 | ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty; $($rest:tt)*) => { 24 | $crate::__task_local_inner!($(#[$attr])* $vis $name, $t); 25 | $crate::task_local!($($rest)*); 26 | }; 27 | 28 | ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty) => { 29 | $crate::__task_local_inner!($(#[$attr])* $vis $name, $t); 30 | } 31 | } 32 | 33 | #[doc(hidden)] 34 | #[macro_export] 35 | macro_rules! __task_local_inner { 36 | ($(#[$attr:meta])* $vis:vis $name:ident, $t:ty) => { 37 | $vis static $name: $crate::task_local::LocalKey<$t> = { 38 | std::thread_local! { 39 | static __KEY: std::cell::RefCell> = std::cell::RefCell::new(None); 40 | } 41 | 42 | $crate::task_local::LocalKey { inner: __KEY } 43 | }; 44 | }; 45 | } 46 | 47 | /// A key for task-local data. 48 | /// 49 | /// This type is generated by the `task_local!` macro. 50 | /// 51 | /// Unlike [`std::thread::LocalKey`], `LocalKey` will 52 | /// _not_ lazily initialize the value on first access. Instead, the 53 | /// value is first initialized when the future containing 54 | /// the task-local is first polled by a futures executor. 55 | pub struct LocalKey { 56 | #[doc(hidden)] 57 | pub inner: thread::LocalKey>>, 58 | } 59 | 60 | #[cfg_attr(any(not(feature = "handle"), target_arch = "wasm32"), allow(unused))] 61 | impl LocalKey { 62 | /// Sets a value `T` as the task-local value for the future `F`. 63 | /// 64 | /// On completion of `scope`, the task-local will be dropped. 65 | #[allow(dead_code)] 66 | pub async fn scope(&'static self, value: T, f: F) -> F::Output 67 | where 68 | F: Future, 69 | { 70 | TaskLocalFuture { 71 | local: self, 72 | slot: Some(value), 73 | future: f, 74 | } 75 | .await 76 | } 77 | 78 | /// Accesses the current task-local and runs the provided closure. 
79 | /// 80 | /// # Panics 81 | /// 82 | /// This function will panic if not called within the context 83 | /// of a future containing a task-local with the corresponding key. 84 | #[allow(dead_code)] 85 | pub fn with(&'static self, f: F) -> R 86 | where 87 | F: FnOnce(&T) -> R, 88 | { 89 | self.try_with(f).expect( 90 | "cannot access a Task Local Storage value \ 91 | without setting it via `LocalKey::set`", 92 | ) 93 | } 94 | 95 | /// Accesses the current task-local and runs the provided closure. 96 | /// 97 | /// If the task-local with the associated key is not present, this 98 | /// method will return an `AccessError`. For a panicking variant, 99 | /// see `with`. 100 | pub fn try_with(&'static self, f: F) -> Result 101 | where 102 | F: FnOnce(&T) -> R, 103 | { 104 | self.inner.with(|v| { 105 | if let Some(val) = v.borrow().as_ref() { 106 | Ok(f(val)) 107 | } else { 108 | Err(AccessError { _private: () }) 109 | } 110 | }) 111 | } 112 | } 113 | 114 | impl fmt::Debug for LocalKey { 115 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 116 | f.pad("LocalKey { .. }") 117 | } 118 | } 119 | 120 | pin_project! 
{ 121 | struct TaskLocalFuture { 122 | local: &'static LocalKey, 123 | slot: Option, 124 | #[pin] 125 | future: F, 126 | } 127 | } 128 | 129 | impl TaskLocalFuture { 130 | fn with_task) -> R, R>(self: Pin<&mut Self>, f: F2) -> R { 131 | struct Guard<'a, T: 'static> { 132 | local: &'static LocalKey, 133 | slot: &'a mut Option, 134 | prev: Option, 135 | } 136 | 137 | impl Drop for Guard<'_, T> { 138 | fn drop(&mut self) { 139 | let value = self.local.inner.with(|c| c.replace(self.prev.take())); 140 | *self.slot = value; 141 | } 142 | } 143 | 144 | let mut project = self.project(); 145 | let val = project.slot.take(); 146 | 147 | let prev = project.local.inner.with(|c| c.replace(val)); 148 | 149 | let _guard = Guard { 150 | prev, 151 | slot: &mut project.slot, 152 | local: *project.local, 153 | }; 154 | 155 | f(project.future) 156 | } 157 | } 158 | 159 | impl Future for TaskLocalFuture { 160 | type Output = F::Output; 161 | 162 | fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { 163 | self.with_task(|f| f.poll(cx)) 164 | } 165 | } 166 | 167 | // Required to make `pin_project` happy. 168 | trait StaticLifetime: 'static {} 169 | impl StaticLifetime for T {} 170 | 171 | /// An error returned by [`LocalKey::try_with`](method@LocalKey::try_with). 
172 | #[derive(Clone, Copy, Eq, PartialEq)] 173 | pub struct AccessError { 174 | _private: (), 175 | } 176 | 177 | impl fmt::Debug for AccessError { 178 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 179 | f.debug_struct("AccessError").finish() 180 | } 181 | } 182 | 183 | impl fmt::Display for AccessError { 184 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 185 | fmt::Display::fmt("task-local value not set", f) 186 | } 187 | } 188 | 189 | impl Error for AccessError {} 190 | -------------------------------------------------------------------------------- /processing/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "processing" 3 | version = "0.0.1" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | license = "MIT OR Apache-2.0" 7 | publish = false 8 | 9 | [dependencies] 10 | uuid = { version = "0.8", features = ["v4"] } 11 | downcast = "0.10" 12 | serde = { version = "1", features = ["derive"] } 13 | serde_derive = "1.0" 14 | erased-serde = "0.3" 15 | petgraph = "0.4.13" 16 | -------------------------------------------------------------------------------- /processing/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod graph; 2 | pub mod processor; 3 | -------------------------------------------------------------------------------- /schema/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "distill-schema" 3 | version = "0.0.3" 4 | authors = ["Karl Bergström "] 5 | edition = "2018" 6 | license = "MIT OR Apache-2.0" 7 | description = "RPC schema definitions for the asset pipeline `distill`." 
8 | 9 | [dependencies] 10 | distill-core = { path = "../core", version = "=0.0.3" } 11 | 12 | capnp = "0.14.0" 13 | -------------------------------------------------------------------------------- /schema/schema-gen/Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | [package] 3 | name = "schema-gen" 4 | version = "0.0.1" 5 | authors = ["Karl Bergström "] 6 | edition = "2018" 7 | 8 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 9 | 10 | [dependencies] 11 | capnpc = "0.14" 12 | -------------------------------------------------------------------------------- /schema/schema-gen/src/main.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | let current_dir = std::env::current_dir().unwrap(); 3 | let parent_dir = current_dir.join(".."); 4 | std::env::set_current_dir(parent_dir).unwrap(); 5 | capnpc::CompilerCommand::new() 6 | .file("schemas/data.capnp") 7 | .file("schemas/service.capnp") 8 | .file("schemas/pack.capnp") 9 | .output_path("src/") 10 | .run() 11 | .expect("schema compiler command"); 12 | } 13 | -------------------------------------------------------------------------------- /schema/schemas/data.capnp: -------------------------------------------------------------------------------- 1 | @0xdbbcd8ccbd223b6c; # unique file ID, generated by `capnp id` 2 | 3 | struct AssetUuid { 4 | id @0 :Data; 5 | } 6 | 7 | struct AssetRef { 8 | union { 9 | uuid @0 :AssetUuid; 10 | path @1 :Data; 11 | } 12 | } 13 | 14 | struct AssetUuidList { 15 | list @0 :List(AssetUuid); 16 | } 17 | 18 | struct KeyValue { 19 | key @0 :Data; 20 | value @1 :Data; 21 | } 22 | 23 | enum FileType { 24 | file @0; 25 | directory @1; 26 | symlink @2; 27 | none @3; 28 | } 29 | 30 | enum FileState { 31 | exists @0; 32 | deleted @1; 33 | } 34 | 35 | struct DirtyFileInfo { 36 | state @0 :FileState; 37 | sourceInfo @1 :SourceFileInfo; 38 | } 39 
| 40 | struct SourceFileInfo { 41 | type @0 :FileType; 42 | lastModified @1 :UInt64; 43 | length @2 :UInt64; 44 | } 45 | 46 | struct RenameFileEvent { 47 | src @0 :Data; 48 | dst @1 :Data; 49 | } 50 | 51 | # Type of Asset Source. An Asset Source is something that produces assets and registers metadata about the 52 | # assets with the asset hub, as well as notifying the asset hub about changed assets. 53 | # Currently we only have File Asset Source, but in the future could perhaps have 54 | # remote-sourced asset sources like a GitHub repository. 55 | enum AssetSource { 56 | file @0; 57 | } 58 | 59 | 60 | struct AssetUuidPair { 61 | key @0 :AssetUuid; 62 | value @1 :AssetUuid; 63 | } 64 | 65 | # Metadata associated with a source file 66 | struct SourceMetadata { 67 | assets @0 :List(AssetMetadata); 68 | importerVersion @1 :UInt32; 69 | importerOptionsType @2 :Data; 70 | importerOptions @3 :Data; 71 | importerStateType @4 :Data; 72 | importerState @5 :Data; 73 | buildPipelines @6 :List(AssetUuidPair); 74 | importerType @7 :Data; 75 | union { 76 | error @8 :Error; 77 | noError @9 :Void; 78 | } 79 | pathRefs @10 :List(Data); 80 | importHash @11 :Data; 81 | version @12 :UInt32; 82 | } 83 | 84 | struct PathRefs { 85 | paths @0 :List(Data); 86 | } 87 | 88 | struct Error { 89 | text @0 :Text; 90 | } 91 | 92 | struct ArtifactMetadata { 93 | assetId @0 :AssetUuid; 94 | hash @1 :Data; 95 | loadDeps @2 :List(AssetRef); 96 | buildDeps @3 :List(AssetRef); 97 | compression @4 :CompressionType; 98 | compressedSize @5 :UInt64; 99 | uncompressedSize @6 :UInt64; 100 | typeId @7 :Data; 101 | } 102 | 103 | struct AssetMetadata { 104 | id @0 :AssetUuid; 105 | searchTags @1 :List(KeyValue); 106 | buildPipeline @2 :AssetUuid; 107 | # The most recently recorded artifact of the import function 108 | latestArtifact :union { 109 | artifact @3 :ArtifactMetadata; 110 | none @4 :Void; 111 | } 112 | # The source of the imported asset 113 | source @5 :AssetSource; 114 | union { 115 | error @6 
:Error; 116 | noError @7 :Void; 117 | } 118 | } 119 | 120 | # The identifier for a build artifact is the hash of 121 | # - Import artifact hash 122 | # - Build parameters (target platform etc) 123 | # - Build pipeline hash 124 | 125 | # The identifier of an import artifact is the hash of 126 | # - Source file 127 | # - Importer version 128 | # - Importer TypeUUID 129 | # - Importer state 130 | # - Importer state TypeUUID 131 | # - Importer options 132 | # - Importer options TypeUUID 133 | 134 | struct Artifact { 135 | metadata @0 :ArtifactMetadata; 136 | data @1 :Data; # Serialized data as per metadata 137 | } 138 | 139 | struct BuildParameters { 140 | 141 | } 142 | 143 | struct AssetChangeLogEntry { 144 | num @0 :UInt64; 145 | event @1 :AssetChangeEvent; 146 | } 147 | struct AssetChangeEvent { 148 | union { 149 | contentUpdateEvent @0 :AssetContentUpdateEvent; 150 | removeEvent @1 :AssetRemoveEvent; 151 | pathUpdateEvent @2 :PathUpdateEvent; 152 | pathRemoveEvent @3 :PathRemoveEvent; 153 | } 154 | } 155 | 156 | struct AssetContentUpdateEvent { 157 | id @0 :AssetUuid; 158 | importHash @1 :Data; 159 | # `buildDepHash` is the hash of all build dependencies' import hashes sorted by their AssetUUID. 
160 | # It can be used to determine if a build artifact needs to be invalidated 161 | # by hashing (import_hash, build_dep_hash, build_pipeline_hash, build_parameters) 162 | buildDepHash @2 :Data; 163 | } 164 | 165 | struct PathUpdateEvent { 166 | path @0 :Data; 167 | } 168 | 169 | struct AssetRemoveEvent { 170 | id @0 :AssetUuid; 171 | } 172 | 173 | struct PathRemoveEvent { 174 | path @0 :Data; 175 | } 176 | 177 | enum CompressionType { 178 | none @0; 179 | lz4 @1; 180 | } 181 | 182 | struct DaemonInfo { 183 | version @0 :UInt32; 184 | } -------------------------------------------------------------------------------- /schema/schemas/pack.capnp: -------------------------------------------------------------------------------- 1 | @0xf9468fe902e7fa8e; # unique file ID, generated by `capnp id` 2 | 3 | using D= import "data.capnp"; 4 | 5 | struct PackFileEntry { 6 | artifact @0 :D.Artifact; 7 | assetMetadata @1 :D.AssetMetadata; 8 | path @2 :Data; 9 | } 10 | 11 | struct PackFile { 12 | entries @0 :List(PackFileEntry); 13 | } 14 | -------------------------------------------------------------------------------- /schema/schemas/service.capnp: -------------------------------------------------------------------------------- 1 | @0x805eb2f9d3deb354; 2 | 3 | using D = import "data.capnp"; 4 | struct AssetPath { 5 | id @0 :D.AssetUuid; 6 | path @1 :Data; 7 | } 8 | struct PathAssets { 9 | path @0 :Data; 10 | assets @1 :List(D.AssetUuid); 11 | } 12 | struct AssetData { 13 | data @0 :Data; 14 | typeId @1 :Data; 15 | } 16 | interface AssetHub { 17 | registerListener @0 (listener :Listener) -> (); 18 | getSnapshot @1 () -> (snapshot :Snapshot); 19 | 20 | interface Snapshot { 21 | getAssetMetadata @0 (assets :List(D.AssetUuid)) -> (assets :List(D.AssetMetadata)); 22 | getAssetMetadataWithDependencies @1 (assets :List(D.AssetUuid)) -> (assets :List(D.AssetMetadata)); 23 | getAllAssetMetadata @2 () -> (assets :List(D.AssetMetadata)); 24 | getLatestAssetChange @3 () -> (num :UInt64); 
25 | getAssetChanges @4 (start :UInt64, count :UInt64) -> (changes :List(D.AssetChangeLogEntry)); 26 | getImportArtifacts @5 (assets :List(D.AssetUuid)) -> (artifacts :List(D.Artifact)); 27 | updateAsset @6 (asset :D.Artifact) -> (newImportHash :Data); 28 | patchAsset @7 (assetId :D.AssetUuid, assetHash :Data, patch :AssetData) -> (newImportHash :Data); 29 | 30 | # these are FileAssetSource specific and should probably be moved to another RPC interface 31 | # but on the other hand are useful to have in the same DB snapshot 32 | getPathForAssets @8 (assets :List(D.AssetUuid)) -> (paths :List(AssetPath)); 33 | getAssetsForPaths @9 (paths :List(Data)) -> (assets :List(PathAssets)); 34 | createFile @10 (path :Data, assets :List(AssetData)) -> (newImportHash :Data); 35 | deleteFile @11 (path :Data) -> (); 36 | } 37 | 38 | interface Listener { 39 | # Called on registration and when a batch of asset updates have been processed 40 | update @0 (latestChange :UInt64, snapshot :Snapshot); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /schema/src/schemas/mod.rs: -------------------------------------------------------------------------------- 1 | #[allow(clippy::all)] 2 | #[allow(dead_code)] 3 | pub mod data_capnp; 4 | #[allow(clippy::all)] 5 | #[allow(dead_code)] 6 | pub mod pack_capnp; 7 | #[allow(clippy::all)] 8 | #[allow(dead_code)] 9 | pub mod service_capnp; 10 | -------------------------------------------------------------------------------- /tests/assets/asset.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/amethyst/distill/852e48272ab3dad76c7490fce8df52461f94ebc0/tests/assets/asset.txt -------------------------------------------------------------------------------- /tests/assets/asset.txt.meta: -------------------------------------------------------------------------------- 1 | ( 2 | version: 2, 3 | importer_options: (), 4 | importer_state: ( 5 | 
id: Some("b24d209d-6622-4d78-a983-731e8b76f04d"), 6 | ), 7 | ) -------------------------------------------------------------------------------- /tests/assets/asset_a.txt: -------------------------------------------------------------------------------- 1 | # empty file indicates no dependencies 2 | # non-empty file indicates additional assets that are depended on. 3 | # comments begin with `#` 4 | 23da999a-a974-4d0e-918a-f226ea0b3e69 # asset_b.txt 5 | 14f807b9-69ef-484b-9cb8-44787883b86d # asset_d.txt 6 | -------------------------------------------------------------------------------- /tests/assets/asset_a.txt.meta: -------------------------------------------------------------------------------- 1 | ( 2 | version: 2, 3 | importer_options: (), 4 | importer_state: ( 5 | id: Some("d83bb247-2710-4c10-83df-d7daa53e19bf"), 6 | ), 7 | ) -------------------------------------------------------------------------------- /tests/assets/asset_b.txt: -------------------------------------------------------------------------------- 1 | # empty file indicates no dependencies 2 | # non-empty file indicates additional assets that are depended on. 3 | # comments begin with `#` 4 | 40becaa7-cedb-466a-afee-41fecb1c916f # asset_c.txt 5 | -------------------------------------------------------------------------------- /tests/assets/asset_b.txt.meta: -------------------------------------------------------------------------------- 1 | ( 2 | version: 2, 3 | importer_options: (), 4 | importer_state: ( 5 | id: Some("23da999a-a974-4d0e-918a-f226ea0b3e69"), 6 | ), 7 | ) -------------------------------------------------------------------------------- /tests/assets/asset_c.txt: -------------------------------------------------------------------------------- 1 | # empty file indicates no dependencies 2 | # non-empty file indicates additional assets that are depended on. 
3 | # comments begin with `#` 4 | -------------------------------------------------------------------------------- /tests/assets/asset_c.txt.meta: -------------------------------------------------------------------------------- 1 | ( 2 | version: 2, 3 | importer_options: (), 4 | importer_state: ( 5 | id: Some("40becaa7-cedb-466a-afee-41fecb1c916f"), 6 | ), 7 | ) -------------------------------------------------------------------------------- /tests/assets/asset_d.txt: -------------------------------------------------------------------------------- 1 | # empty file indicates no dependencies 2 | # non-empty file indicates additional assets that are depended on. 3 | # comments begin with `#` 4 | -------------------------------------------------------------------------------- /tests/assets/asset_d.txt.meta: -------------------------------------------------------------------------------- 1 | ( 2 | version: 2, 3 | importer_options: (), 4 | importer_state: ( 5 | id: Some("14f807b9-69ef-484b-9cb8-44787883b86d"), 6 | ), 7 | ) --------------------------------------------------------------------------------