├── tests ├── db │ ├── mod.rs │ ├── migrations │ │ └── 20231101223300_forest.sql │ └── crud.rs ├── lib.rs ├── postgres.yml ├── json_attr │ ├── migrations │ │ └── 20251019111200_with_json.sql │ ├── mod.rs │ ├── non_nullable.rs │ └── nullable.rs └── postgis │ └── mod.rs ├── atmosphere-macros ├── src │ ├── schema │ │ ├── relation.rs │ │ ├── mod.rs │ │ ├── keys.rs │ │ ├── table.rs │ │ └── column.rs │ ├── derive │ │ ├── queries │ │ │ ├── mod.rs │ │ │ └── unique.rs │ │ ├── mod.rs │ │ ├── hooks.rs │ │ ├── table.rs │ │ ├── bindings.rs │ │ └── relationships.rs │ ├── hooks │ │ └── mod.rs │ └── lib.rs └── Cargo.toml ├── docs ├── src │ ├── getting-started │ │ ├── crud.md │ │ ├── index.md │ │ ├── installation.md │ │ ├── schema.md │ │ └── queries.md │ ├── SUMMARY.md │ ├── traits │ │ ├── index.md │ │ ├── create.md │ │ ├── update.md │ │ ├── read.md │ │ └── delete.md │ └── index.md ├── assets │ ├── logo.png │ └── banner.png └── book.toml ├── .gitignore ├── atmosphere-extras ├── src │ ├── postgis │ │ ├── mod.rs │ │ └── geometry.rs │ └── lib.rs └── Cargo.toml ├── examples ├── forest │ ├── migrations │ │ └── 20230701234331_forest.up.sql │ └── main.rs └── blog │ ├── migrations │ └── 20230701234331_blog.up.sql │ └── main.rs ├── atmosphere-core ├── src │ ├── runtime │ │ ├── mod.rs │ │ └── sql.rs │ ├── error.rs │ ├── schema │ │ ├── create.rs │ │ ├── update.rs │ │ ├── delete.rs │ │ ├── read.rs │ │ └── mod.rs │ ├── hooks │ │ └── mod.rs │ ├── bind.rs │ ├── lib.rs │ ├── rel.rs │ ├── testing.rs │ └── query.rs └── Cargo.toml ├── .github └── workflows │ ├── publish.yml │ ├── book.yml │ └── ci.yml ├── CONTRIBUTING.md ├── src └── lib.rs ├── Cargo.toml ├── flake.nix ├── flake.lock ├── README.md └── LICENSE /tests/db/mod.rs: -------------------------------------------------------------------------------- 1 | mod crud; 2 | -------------------------------------------------------------------------------- /atmosphere-macros/src/schema/relation.rs: 
-------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /docs/src/getting-started/crud.md: -------------------------------------------------------------------------------- 1 | # Use CRUD traits 2 | -------------------------------------------------------------------------------- /docs/assets/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/helsing-ai/atmosphere/HEAD/docs/assets/logo.png -------------------------------------------------------------------------------- /docs/assets/banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/helsing-ai/atmosphere/HEAD/docs/assets/banner.png -------------------------------------------------------------------------------- /tests/lib.rs: -------------------------------------------------------------------------------- 1 | mod db; 2 | 3 | #[cfg(feature = "postgis")] 4 | mod postgis; 5 | 6 | mod json_attr; 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | target 3 | Cargo.lock 4 | docs/book 5 | docs/doctest_cache 6 | .vscode 7 | data 8 | -------------------------------------------------------------------------------- /atmosphere-macros/src/schema/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod column; 2 | pub mod keys; 3 | pub mod relation; 4 | pub mod table; 5 | -------------------------------------------------------------------------------- /atmosphere-extras/src/postgis/mod.rs: -------------------------------------------------------------------------------- 1 | //! Implementation of PostGIS geometry types. 
2 | 3 | mod geometry; 4 | 5 | pub use geometry::*; 6 | -------------------------------------------------------------------------------- /atmosphere-extras/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # `🌍 Atmosphere Extras` 2 | //! 3 | //! Implementations for additional database types, such as types from PostGIS plugin for Postgres. 4 | 5 | #[cfg(feature = "postgis")] 6 | pub mod postgis; 7 | -------------------------------------------------------------------------------- /atmosphere-macros/src/derive/queries/mod.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::TokenStream; 2 | 3 | use crate::schema::table::Table; 4 | 5 | mod unique; 6 | 7 | pub fn queries(table: &Table) -> TokenStream { 8 | unique::queries(table) 9 | } 10 | -------------------------------------------------------------------------------- /tests/postgres.yml: -------------------------------------------------------------------------------- 1 | version: "3.9" 2 | services: 3 | database: 4 | image: imresamu/postgis:16-3.5 5 | environment: 6 | POSTGRES_PASSWORD: atmosphere 7 | POSTGRES_USER: atmosphere 8 | ports: 9 | - 127.0.0.1:5432:5432 10 | -------------------------------------------------------------------------------- /tests/json_attr/migrations/20251019111200_with_json.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE with_json_non_nullable ( 2 | id INT PRIMARY KEY, 3 | data JSONB NOT NULL 4 | ); 5 | 6 | CREATE TABLE with_json_nullable ( 7 | id INT PRIMARY KEY, 8 | data JSONB 9 | ); 10 | -------------------------------------------------------------------------------- /docs/src/getting-started/index.md: -------------------------------------------------------------------------------- 1 | ## Getting Started 2 | 3 | To get started with atmosphere, install it and set up your first package. Next, 4 | you will want to define your schema. 
5 | 6 | * [Installation](installation.md) 7 | * [Schema](schema.md) 8 | * [Queries](queries.md) 9 | -------------------------------------------------------------------------------- /tests/db/migrations/20231101223300_forest.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE forest ( 2 | id INT PRIMARY KEY, 3 | name TEXT NOT NULL, 4 | location TEXT NOT NULL 5 | ); 6 | 7 | CREATE TABLE tree ( 8 | id INT PRIMARY KEY, 9 | forest_id INT NOT NULL REFERENCES forest(id) ON DELETE CASCADE 10 | ); 11 | -------------------------------------------------------------------------------- /docs/src/getting-started/installation.md: -------------------------------------------------------------------------------- 1 | ## Installation 2 | 3 | ### Install Atmosphere 4 | 5 | The easiest way to get started with atmosphere is to add the current stable 6 | release from [crates.io] as your dependency: 7 | 8 | ```bash 9 | cargo add atmosphere 10 | ``` 11 | 12 | [crates.io]: https://crates.io 13 | -------------------------------------------------------------------------------- /docs/book.toml: -------------------------------------------------------------------------------- 1 | [book] 2 | authors = ["Mara Schulke "] 3 | language = "en" 4 | multilingual = false 5 | src = "src" 6 | title = "Atmosphere" 7 | 8 | [preprocessor.keeper] 9 | command = "mdbook-keeper" 10 | manifest_dir = ".." 
11 | is_workspace = true 12 | build_features = ["atmosphere-core/postgres"] 13 | -------------------------------------------------------------------------------- /examples/forest/migrations/20230701234331_forest.up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE forest ( 2 | id INT PRIMARY KEY, 3 | name TEXT NOT NULL, 4 | location TEXT NOT NULL, 5 | 6 | created TIMESTAMPTZ NOT NULL 7 | ); 8 | 9 | CREATE TABLE tree ( 10 | id INT PRIMARY KEY, 11 | forest_id INT NOT NULL REFERENCES forest(id) ON DELETE CASCADE 12 | ); 13 | -------------------------------------------------------------------------------- /tests/json_attr/mod.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | mod non_nullable; 4 | mod nullable; 5 | 6 | #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Deserialize, Serialize)] 7 | struct Data { 8 | name: String, 9 | } 10 | 11 | impl Data { 12 | fn new(name: &str) -> Self { 13 | Self { 14 | name: name.to_owned(), 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /docs/src/SUMMARY.md: -------------------------------------------------------------------------------- 1 | # Summary 2 | 3 | [Introduction](index.md) 4 | 5 | * [Getting Started](getting-started/index.md) 6 | * [Installation](getting-started/installation.md) 7 | * [Schema](getting-started/schema.md) 8 | * [Queries](getting-started/queries.md) 9 | * [Traits](traits/index.md) 10 | * [Create](traits/create.md) 11 | * [Read](traits/read.md) 12 | * [Update](traits/update.md) 13 | * [Delete](traits/delete.md) 14 | -------------------------------------------------------------------------------- /examples/blog/migrations/20230701234331_blog.up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE user ( 2 | id INT PRIMARY KEY, 3 | name TEXT NOT NULL UNIQUE, 4 | 
email TEXT NOT NULL UNIQUE 5 | ); 6 | 7 | CREATE TABLE post ( 8 | id INT PRIMARY KEY, 9 | author INT REFERENCES user(id) ON DELETE CASCADE, 10 | title TEXT NOT NULL UNIQUE, 11 | created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, 12 | updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, 13 | deleted_at TIMESTAMPTZ 14 | ); 15 | -------------------------------------------------------------------------------- /docs/src/traits/index.md: -------------------------------------------------------------------------------- 1 | # Traits 2 | 3 | Given that you have annotated your entities with `table`, Atmosphere provides 4 | you with some traits for simple CRUD operations on your database. These make it 5 | very straightforward to do simple operations, but keep in mind that you are 6 | always able to reach down and write raw SQL manually where it makes sense. 7 | 8 | Here are the traits that Atmosphere exposes, along with some examples: 9 | 10 | * [Create](create.md) 11 | * [Read](read.md) 12 | * [Update](update.md) 13 | * [Delete](delete.md) 14 | -------------------------------------------------------------------------------- /atmosphere-core/src/runtime/mod.rs: -------------------------------------------------------------------------------- 1 | //! Runtime Module for Atmosphere SQL Framework 2 | //! 3 | //! The `runtime` module is a critical component, providing the 4 | //! necessary runtime infrastructure for generating SQL queries. 5 | //! 6 | //! This module includes various utilities and abstractions that facilitate the construction and 7 | //! execution of queries, handling connections, and managing transactions. It acts as the backbone 8 | //! of the framework, ensuring smooth and efficient operations with the database at runtime.
9 | 10 | /// SQL code generator 11 | pub mod sql; 12 | -------------------------------------------------------------------------------- /atmosphere-macros/src/derive/mod.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::TokenStream; 2 | use quote::quote; 3 | 4 | use crate::schema::table::Table; 5 | 6 | mod bindings; 7 | mod hooks; 8 | mod queries; 9 | mod relationships; 10 | mod table; 11 | 12 | pub fn all(table: &Table) -> TokenStream { 13 | let bindings = bindings::bindings(table); 14 | let queries = queries::queries(table); 15 | let relationships = relationships::relationships(table); 16 | let hooks = hooks::hooks(table); 17 | let table = table::table(table); 18 | 19 | quote!( 20 | #table 21 | 22 | #bindings 23 | 24 | #queries 25 | 26 | #relationships 27 | 28 | #hooks 29 | ) 30 | } 31 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish to crates.io 2 | on: 3 | push: 4 | tags: ['v*'] # Triggers when pushing tags starting with 'v' 5 | jobs: 6 | publish: 7 | runs-on: ubuntu-latest 8 | environment: release 9 | permissions: 10 | id-token: write 11 | steps: 12 | - uses: actions/checkout@v4 13 | - name: Install latest nightly 14 | uses: actions-rs/toolchain@v1 15 | with: 16 | toolchain: nightly 17 | override: true 18 | components: cargo 19 | - uses: rust-lang/crates-io-auth-action@v1 20 | id: auth 21 | - run: cargo publish -Z package-workspace --workspace 22 | env: 23 | CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }} 24 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to `atmosphere` 2 | 3 | We welcome contributions, big and small! 
4 | 5 | ## Testing 6 | 7 | Tests in `atmosphere` need a database to work with. 8 | We provide a `docker-compose` service for this, you can use it in the following way: 9 | 10 | ```bash 11 | $ docker-compose -f ./tests/postgres.yml up -d 12 | $ export DATABASE_URL="postgres://atmosphere:atmosphere@localhost" 13 | $ cargo test -F postgres 14 | (... snip ...) 15 | running 4 tests 16 | test db::crud::read ... ok 17 | test db::crud::create ... ok 18 | test db::crud::delete ... ok 19 | test db::crud::update ... ok 20 | 21 | test result: ok. 4 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.27s 22 | (... snip ...) 23 | ``` 24 | -------------------------------------------------------------------------------- /docs/src/index.md: -------------------------------------------------------------------------------- 1 | # Atmosphere 2 | 3 | Atmosphere is a lightweight SQL framework designed for sustainable, 4 | database-reliant systems. It leverages Rust's powerful type and macro systems 5 | to derive SQL schemas from your rust struct definitions into an advanced trait 6 | system. 7 | 8 | It works by leveraging the [`sqlx`][] crate and the Rust macro system to allow 9 | you to work easily with relational-database mapped entities, while still 10 | enabling low level usage of the underlying `sqlx` concepts. 11 | 12 | ### Sections 13 | 14 | **[Getting Started](getting-started/index.md)** 15 | 16 | **[Traits](traits/index.md)** 17 | 18 | [GitHub]: https://github.com/helsing-ai/atmosphere/tree/main 19 | [`sqlx`]: https://github.com/launchbadge/sqlx 20 | -------------------------------------------------------------------------------- /atmosphere-extras/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "atmosphere-extras" 3 | description = "Implementations for additional database types." 
4 | version.workspace = true 5 | license.workspace = true 6 | edition.workspace = true 7 | authors.workspace = true 8 | repository.workspace = true 9 | 10 | [features] 11 | default = [] 12 | postgis = ["sqlx/postgres", "dep:geozero", "dep:geo-types"] 13 | serde = ["dep:serde", "geo-types/serde"] 14 | 15 | [dependencies] 16 | geo-types = { version = "0.7", optional = true } 17 | geozero = { version = "0.14", features = [ 18 | "with-postgis-sqlx", 19 | ], optional = true } 20 | serde = { workspace = true, features = ["derive"], optional = true } 21 | sqlx.workspace = true 22 | thiserror.workspace = true 23 | 24 | [dev-dependencies] 25 | serde_json = "1" 26 | 27 | [package.metadata.docs.rs] 28 | features = ["postgis"] 29 | -------------------------------------------------------------------------------- /atmosphere-core/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "atmosphere-core" 3 | description = "Core trait system and types for the atmosphere project" 4 | version.workspace = true 5 | license.workspace = true 6 | edition.workspace = true 7 | authors.workspace = true 8 | repository.workspace = true 9 | 10 | [features] 11 | default = ["runtime-tokio-rustls"] 12 | runtime-async-std-native-tls = ["sqlx/runtime-async-std-native-tls"] 13 | runtime-tokio-native-tls = ["sqlx/runtime-tokio-native-tls"] 14 | runtime-async-std-rustls = ["sqlx/runtime-async-std-rustls"] 15 | runtime-tokio-rustls = ["sqlx/runtime-tokio-rustls"] 16 | mysql = ["sqlx/mysql"] 17 | postgres = ["sqlx/postgres"] 18 | sqlite = ["sqlx/sqlite"] 19 | 20 | [dependencies] 21 | async-trait.workspace = true 22 | sqlx.workspace = true 23 | thiserror.workspace = true 24 | lazy_static.workspace = true 25 | miette = "7" 26 | 27 | [package.metadata.docs.rs] 28 | features = ["postgres"] 29 | -------------------------------------------------------------------------------- /atmosphere-macros/Cargo.toml: 
-------------------------------------------------------------------------------- 1 | [package] 2 | name = "atmosphere-macros" 3 | description = "Macro crate of atmosphere" 4 | version.workspace = true 5 | license.workspace = true 6 | edition.workspace = true 7 | authors.workspace = true 8 | repository.workspace = true 9 | 10 | [lib] 11 | proc-macro = true 12 | 13 | [dependencies] 14 | atmosphere-core.workspace = true 15 | sqlx.workspace = true 16 | proc-macro2 = { version = "1.0.36", default-features = false } 17 | syn = { version = "2.0.39", default-features = false, features = [ 18 | "parsing", 19 | "proc-macro", 20 | ] } 21 | quote = { version = "1.0.14", default-features = false } 22 | lazy_static = "1.4.0" 23 | 24 | [features] 25 | default = [] 26 | mysql = ["atmosphere-core/mysql"] 27 | postgres = ["atmosphere-core/postgres"] 28 | sqlite = ["atmosphere-core/sqlite"] 29 | 30 | [dev-dependencies] 31 | chrono = "0.4.31" 32 | 33 | [package.metadata.docs.rs] 34 | features = ["postgres"] 35 | -------------------------------------------------------------------------------- /atmosphere-macros/src/hooks/mod.rs: -------------------------------------------------------------------------------- 1 | #[derive(Clone, Debug, Default)] 2 | pub struct Hooks { 3 | pub registered: Vec, 4 | } 5 | 6 | impl syn::parse::Parse for Hooks { 7 | fn parse(input: syn::parse::ParseStream) -> syn::Result { 8 | let mut hooks = vec![]; 9 | 10 | while !input.is_empty() { 11 | let expr: syn::Expr = input.parse()?; 12 | 13 | match expr { 14 | syn::Expr::Path(_) | syn::Expr::Struct(_) => { 15 | hooks.push(expr); 16 | } 17 | _ => { 18 | return Err(syn::Error::new_spanned( 19 | expr, 20 | "`#[hooks(..)]` only supports paths or struct literals", 21 | )); 22 | } 23 | } 24 | 25 | if input.peek(syn::Token![,]) { 26 | input.parse::()?; 27 | } 28 | } 29 | 30 | Ok(Self { registered: hooks }) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- 
/.github/workflows/book.yml: -------------------------------------------------------------------------------- 1 | name: Book 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | workflow_dispatch: 7 | 8 | permissions: 9 | contents: read 10 | pages: write 11 | id-token: write 12 | 13 | concurrency: 14 | group: "pages" 15 | cancel-in-progress: false 16 | 17 | jobs: 18 | deploy: 19 | environment: 20 | name: Book 21 | url: ${{ steps.deployment.outputs.page_url }} 22 | runs-on: ubuntu-latest 23 | steps: 24 | - name: Checkout 25 | uses: actions/checkout@v5 26 | - name: Setup Pages 27 | uses: actions/configure-pages@v5 28 | - name: Setup mdbook 29 | uses: peaceiris/actions-mdbook@v2 30 | - uses: Swatinem/rust-cache@v2 31 | - run: cargo install mdbook-keeper 32 | - run: cd docs && mdbook build 33 | - name: Upload artifacts 34 | uses: actions/upload-pages-artifact@v4 35 | with: 36 | path: 'docs/book' 37 | - name: Deploy to GitHub Pages 38 | id: deployment 39 | uses: actions/deploy-pages@v4 40 | -------------------------------------------------------------------------------- /docs/src/traits/create.md: -------------------------------------------------------------------------------- 1 | # Create 2 | 3 | The [`Create`] trait allows you to create new rows in your tables. 
Here is an example 4 | of how to create a user, given that you have it annotated with [`table`]: 5 | 6 | ```rust 7 | # extern crate atmosphere; 8 | # extern crate sqlx; 9 | # extern crate tokio; 10 | # use atmosphere::prelude::*; 11 | #[derive(Debug, PartialEq)] 12 | #[table(schema = "public", name = "user")] 13 | struct User { 14 | #[sql(pk)] 15 | id: i32, 16 | name: String, 17 | #[sql(unique)] 18 | email: String, 19 | } 20 | 21 | # async fn test() -> std::result::Result<(), Box> { 22 | let database = std::env::var("DATABASE_URL").unwrap(); 23 | let pool = atmosphere::Pool::connect(&database).await?; 24 | 25 | let mut user = User { 26 | id: 0, 27 | name: "demo".to_owned(), 28 | email: "some@email.com".to_owned(), 29 | }; 30 | 31 | user.create(&pool).await?; 32 | # Ok(()) 33 | # } 34 | # fn main() {} 35 | ``` 36 | 37 | [`table`]: https://docs.rs/atmosphere/latest/atmosphere/attr.table.html 38 | [`Create`]: https://docs.rs/atmosphere/latest/atmosphere/trait.Create.html 39 | -------------------------------------------------------------------------------- /docs/src/traits/update.md: -------------------------------------------------------------------------------- 1 | # Update 2 | 3 | The [`Update`] trait allows you to update existing rows in your table.
Here is 4 | an example of how to create a user, given that you have it annotated with 5 | [`table`]: 6 | 7 | ```rust 8 | # extern crate atmosphere; 9 | # extern crate sqlx; 10 | # extern crate tokio; 11 | # use atmosphere::prelude::*; 12 | #[derive(Debug, PartialEq)] 13 | #[table(schema = "public", name = "user")] 14 | struct User { 15 | #[sql(pk)] 16 | id: i32, 17 | name: String, 18 | #[sql(unique)] 19 | email: String, 20 | } 21 | 22 | # async fn test() -> std::result::Result<(), Box> { 23 | let database = std::env::var("DATABASE_URL").unwrap(); 24 | let pool = atmosphere::Pool::connect(&database).await?; 25 | 26 | // find user by primary key 27 | let mut user = User::find(&pool, &0).await?; 28 | 29 | user.email = "joe@example.com".into(); 30 | 31 | // update user data 32 | user.update(&pool).await?; 33 | # Ok(()) 34 | # } 35 | # fn main() {} 36 | ``` 37 | 38 | [`table`]: https://docs.rs/atmosphere/latest/atmosphere/attr.table.html 39 | [`Update`]: https://docs.rs/atmosphere/latest/atmosphere/trait.Update.html 40 | -------------------------------------------------------------------------------- /atmosphere-macros/src/schema/keys.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::TokenStream; 2 | use quote::quote; 3 | use syn::{Ident, Type}; 4 | 5 | use super::column::{ColumnModifiers, NameSet}; 6 | 7 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 8 | pub struct PrimaryKey { 9 | pub modifiers: ColumnModifiers, 10 | pub name: NameSet, 11 | pub ty: Type, 12 | } 13 | 14 | impl PrimaryKey { 15 | pub fn quote(&self) -> TokenStream { 16 | let field = self.name.field(); 17 | let sql = self.name.sql(); 18 | 19 | quote!(::atmosphere::PrimaryKey::new( 20 | stringify!(#field), 21 | stringify!(#sql) 22 | )) 23 | } 24 | } 25 | 26 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 27 | pub struct ForeignKey { 28 | pub on: Ident, 29 | pub modifiers: ColumnModifiers, 30 | pub name: NameSet, 31 | pub ty: Type, 32 | } 33 | 34 | impl 
ForeignKey { 35 | pub fn quote(&self) -> TokenStream { 36 | let field = self.name.field(); 37 | let sql = self.name.sql(); 38 | 39 | quote!(::atmosphere::ForeignKey::new( 40 | stringify!(#field), 41 | stringify!(#sql) 42 | )) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /docs/src/traits/read.md: -------------------------------------------------------------------------------- 1 | # Read 2 | 3 | The [`Read`] trait allows you to read entities from rows in your table. Here is 4 | an example of how to read users, given that you have it annotated with 5 | [`table`]: 6 | 7 | ```rust 8 | # extern crate atmosphere; 9 | # extern crate sqlx; 10 | # extern crate tokio; 11 | # use atmosphere::prelude::*; 12 | #[derive(Debug, PartialEq)] 13 | #[table(schema = "public", name = "user")] 14 | struct User { 15 | #[sql(pk)] 16 | id: i32, 17 | name: String, 18 | #[sql(unique)] 19 | email: String, 20 | } 21 | 22 | # async fn test() -> std::result::Result<(), Box> { 23 | let database = std::env::var("DATABASE_URL").unwrap(); 24 | let pool = atmosphere::Pool::connect(&database).await?; 25 | 26 | // fetch all users 27 | let users = User::find_all(&pool).await?; 28 | 29 | // find user by primary key 30 | let mut user = User::find(&pool, &0).await?; 31 | 32 | // refresh user data 33 | user.reload(&pool).await?; 34 | # Ok(()) 35 | # } 36 | # fn main() {} 37 | ``` 38 | 39 | [`table`]: https://docs.rs/atmosphere/latest/atmosphere/attr.table.html 40 | [`Read`]: https://docs.rs/atmosphere/latest/atmosphere/trait.Read.html 41 | -------------------------------------------------------------------------------- /docs/src/traits/delete.md: -------------------------------------------------------------------------------- 1 | # Delete 2 | 3 | The [`Delete`] trait allows you to delete rows from your table.
Here is 4 | an example of how to create a user, given that you have it annotated with 5 | [`table`]: 6 | 7 | ```rust 8 | # extern crate atmosphere; 9 | # extern crate sqlx; 10 | # extern crate tokio; 11 | # use atmosphere::prelude::*; 12 | #[derive(Debug, PartialEq)] 13 | #[table(schema = "public", name = "user")] 14 | struct User { 15 | #[sql(pk)] 16 | id: i32, 17 | name: String, 18 | #[sql(unique)] 19 | email: String, 20 | } 21 | 22 | # async fn test() -> std::result::Result<(), Box> { 23 | let database = std::env::var("DATABASE_URL").unwrap(); 24 | let pool = atmosphere::Pool::connect(&database).await?; 25 | 26 | // find user by primary key 27 | let mut user = User::find(&pool, &0).await?; 28 | 29 | // delete user data 30 | user.delete(&pool).await?; 31 | 32 | // delete by primary key 33 | User::delete_by(&pool, &4).await?; 34 | # Ok(()) 35 | # } 36 | # fn main() {} 37 | ``` 38 | 39 | [`table`]: https://docs.rs/atmosphere/latest/atmosphere/attr.table.html 40 | [`Delete`]: https://docs.rs/atmosphere/latest/atmosphere/trait.Delete.html 41 | -------------------------------------------------------------------------------- /examples/blog/main.rs: -------------------------------------------------------------------------------- 1 | use atmosphere::prelude::*; 2 | 3 | use sqlx::types::chrono; 4 | 5 | #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] 6 | #[table(schema = "public", name = "user")] 7 | struct User { 8 | #[sql(pk)] 9 | id: i32, 10 | name: String, 11 | #[sql(unique)] 12 | email: String, 13 | } 14 | 15 | #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] 16 | #[table(schema = "public", name = "post")] 17 | struct Post { 18 | #[sql(pk)] 19 | id: i32, 20 | #[sql(fk -> User)] 21 | author: i32, 22 | #[sql(unique)] 23 | title: String, 24 | 25 | #[sql(timestamp = created)] 26 | created_at: chrono::DateTime, 27 | #[sql(timestamp = updated)] 28 | updated_at: chrono::DateTime, 29 | #[sql(timestamp = deleted)] 30 | deleted_at: chrono::DateTime, 31 | } 32 | 
33 | #[tokio::main] 34 | async fn main() -> atmosphere::Result<()> { 35 | let pool = Pool::connect(":memory:").await.unwrap(); 36 | 37 | sqlx::migrate!("examples/blog/migrations") 38 | .run(&pool) 39 | .await 40 | .unwrap(); 41 | 42 | User { 43 | id: 0, 44 | name: "our".to_owned(), 45 | email: "some@email.com".to_owned(), 46 | } 47 | .create(&pool) 48 | .await?; 49 | 50 | Ok(()) 51 | } 52 | -------------------------------------------------------------------------------- /examples/forest/main.rs: -------------------------------------------------------------------------------- 1 | use atmosphere::prelude::*; 2 | 3 | use sqlx::types::chrono; 4 | 5 | #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] 6 | #[table(schema = "public", name = "forest")] 7 | struct Forest { 8 | #[sql(pk)] 9 | id: i32, 10 | #[sql(unique)] 11 | name: String, 12 | #[sql(unique)] 13 | location: String, 14 | #[sql(timestamp = created)] 15 | created: chrono::DateTime, 16 | } 17 | 18 | #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] 19 | #[table(schema = "public", name = "tree")] 20 | struct Tree { 21 | #[sql(pk)] 22 | id: i32, 23 | #[sql(fk -> Forest, rename = "forest_id")] 24 | forest: i32, 25 | } 26 | 27 | #[tokio::main] 28 | async fn main() -> atmosphere::Result<()> { 29 | let pool = Pool::connect(":memory:").await.unwrap(); 30 | 31 | sqlx::migrate!("examples/forest/migrations") 32 | .run(&pool) 33 | .await 34 | .unwrap(); 35 | 36 | let mut forest = Forest { 37 | id: 0, 38 | name: "our".to_owned(), 39 | location: "forest".to_owned(), 40 | created: chrono::Utc::now(), 41 | }; 42 | 43 | forest.create(&pool).await?; 44 | 45 | for id in 0..5 { 46 | Tree { 47 | id, 48 | forest: forest.id, 49 | } 50 | .create(&pool) 51 | .await?; 52 | } 53 | 54 | assert_eq!(forest.trees(&pool).await?.len(), 5); 55 | 56 | forest.delete_trees(&pool).await?; 57 | 58 | assert_eq!(forest.trees(&pool).await?.len(), 0); 59 | 60 | Ok(()) 61 | } 62 | 
-------------------------------------------------------------------------------- /atmosphere-macros/src/derive/hooks.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::TokenStream; 2 | use quote::quote; 3 | 4 | use crate::schema::table::Table; 5 | 6 | pub fn hooks(table: &Table) -> TokenStream { 7 | let ident = &table.ident; 8 | let registered = &table.hooks.registered; 9 | 10 | //let mut derived: Vec = vec![]; 11 | //let mut hooks = TokenStream::new(); 12 | 13 | //for timestamp in table.timestamp_columns.iter() { 14 | //let field = timestamp.name.field(); 15 | 16 | //let hook = syn::Ident::new( 17 | //&format!( 18 | //"__{}TimestampSetter{}", 19 | //ident.to_string(), 20 | //field.to_string() 21 | //), 22 | //field.span(), 23 | //); 24 | 25 | //hooks.extend(quote!( 26 | //struct #hook; 27 | 28 | //#[async_trait::async_trait] 29 | //impl Hook<#ident> for #hook { 30 | //fn stage(&self) -> HookStage { HookStage::PreBind } 31 | 32 | //async fn apply(&self, ctx: &Query<#ident>, input: &mut HookInput<'_, #ident>) -> Result<()> { 33 | //println!( 34 | //"atmosphere::set::{}.{} because {:?} {:?}", 35 | //stringify!(#ident), stringify!(#field), 36 | //ctx.op, 37 | //ctx.cardinality, 38 | //); 39 | 40 | //Ok(()) 41 | //} 42 | //} 43 | //)); 44 | 45 | ////derived.push(hook); 46 | //} 47 | //#(&#derived,),* 48 | 49 | quote!( 50 | #[automatically_derived] 51 | impl ::atmosphere::hooks::Hooks for #ident { 52 | const HOOKS: &'static [&'static dyn ::atmosphere::hooks::Hook<#ident>] = &[ 53 | #(&#registered,),* 54 | ]; 55 | } 56 | ) 57 | } 58 | -------------------------------------------------------------------------------- /atmosphere-macros/src/derive/table.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::TokenStream; 2 | use quote::quote; 3 | 4 | use crate::schema::table::Table; 5 | 6 | pub fn table(table: &Table) -> TokenStream { 7 | let Table { 8 | ident, 9 | id, 10 
| primary_key, 11 | foreign_keys, 12 | data_columns, 13 | timestamp_columns, 14 | .. 15 | } = table; 16 | 17 | let schema = id.schema.to_string(); 18 | let table_name = id.table.to_string(); 19 | 20 | let pk_ty = &table.primary_key.ty; 21 | let pk_field = &table.primary_key.name.field(); 22 | 23 | let primary_key = primary_key.quote(); 24 | let foreign_keys = foreign_keys.iter().map(|r| r.quote()); 25 | let data = data_columns.iter().map(|d| d.quote()); 26 | let timestamps = timestamp_columns.iter().map(|d| d.quote()); 27 | 28 | quote!( 29 | #[automatically_derived] 30 | impl ::atmosphere::Table for #ident { 31 | type PrimaryKey = #pk_ty; 32 | 33 | const SCHEMA: &'static str = #schema; 34 | const TABLE: &'static str = #table_name; 35 | 36 | const PRIMARY_KEY: ::atmosphere::PrimaryKey<#ident> = #primary_key; 37 | const FOREIGN_KEYS: &'static [::atmosphere::ForeignKey<#ident>] = &[#(#foreign_keys),*]; 38 | const DATA_COLUMNS: &'static [::atmosphere::DataColumn<#ident>] = &[#(#data),*]; 39 | const TIMESTAMP_COLUMNS: &'static [::atmosphere::TimestampColumn<#ident>] = &[#(#timestamps),*]; 40 | 41 | fn pk(&self) -> &Self::PrimaryKey { 42 | &self.#pk_field 43 | } 44 | } 45 | ) 46 | } 47 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # `🌍 Atmosphere` 2 | //! 3 | //! **A lightweight sql framework for sustainable database reliant systems** 4 | //! 5 | //! ## Overview 6 | //! 7 | //! Atmosphere is a lightweight SQL framework designed for sustainable, database-reliant systems. 8 | //! It leverages Rust's powerful type and macro systems to derive SQL schemas from your rust struct 9 | //! definitions into an advanced trait system. 10 | //! 11 | //! Atmosphere provides a suite of modules and types that abstract and facilitate various aspects 12 | //! 
of database operations, from query construction and execution to error handling and schema 13 | //! management. 14 | //! 15 | //! ## Key Features 16 | //! 17 | //! - SQL schema derivation from Rust structs. 18 | //! - Advanced trait system for query generation. 19 | //! - Automated database code testing with `atmosphere::testing` 20 | //! - ORM-like CRUD traits. 21 | //! - Code reusability across API layers using generics. 22 | //! - Compile-time introspection for type-safe schema generation. 23 | //! 24 | //! ## Usage 25 | //! 26 | //! To use this crate you must activate **one** of the following features (else the crate is empty): 27 | //! 28 | //! - `mysql` 29 | //! - `postgres` 30 | //! - `sqlite` 31 | 32 | #![cfg(any(feature = "postgres", feature = "mysql", feature = "sqlite"))] 33 | 34 | pub use atmosphere_core::*; 35 | pub use atmosphere_macros::*; 36 | 37 | #[cfg(feature = "postgis")] 38 | pub use atmosphere_extras::postgis; 39 | 40 | /// A prelude module for bringing commonly used types into scope 41 | pub mod prelude { 42 | pub use async_trait::async_trait; 43 | pub use atmosphere_core::*; 44 | pub use atmosphere_macros::*; 45 | pub use sqlx; 46 | } 47 | -------------------------------------------------------------------------------- /docs/src/getting-started/schema.md: -------------------------------------------------------------------------------- 1 | # Define your Schema 2 | 3 | To make use of Atmosphere, you must define your schema in a way that Atmosphere 4 | can understand it. To do so, you use Rust structs augmented with the [`table`] 5 | attribute macro and some metadata which tells it how to map it to SQL. 6 | 7 | Here is an example of what such a schema might look like if you are storing 8 | users and posts in a database. 
9 | 10 | ```rust 11 | # extern crate atmosphere; 12 | # extern crate sqlx; 13 | use atmosphere::prelude::*; 14 | 15 | #[table(schema = "public", name = "user")] 16 | struct User { 17 | #[sql(pk)] 18 | id: i32, 19 | name: String, 20 | #[sql(unique)] 21 | email: String, 22 | } 23 | 24 | #[table(schema = "public", name = "post")] 25 | struct Post { 26 | #[sql(pk)] 27 | id: i32, 28 | #[sql(fk -> User, rename = "author_id")] 29 | author: i32, 30 | #[sql(unique)] 31 | title: String, 32 | } 33 | # fn main() { 34 | # } 35 | ``` 36 | 37 | ## Table properties 38 | 39 | Every type you annotate like this corresponds to one table in your Postgres 40 | database. You must set the table and schema name of the entities by setting 41 | the appropriate keys on the `#[table]` annotation. 42 | 43 | ```rust 44 | # extern crate atmosphere; 45 | # extern crate sqlx; 46 | # use atmosphere::prelude::*; 47 | #[table(schema = "public", name = "users")] 48 | struct User { 49 | # #[sql(pk)] 50 | # id: i32, 51 | // ... 52 | } 53 | # fn main() { 54 | # } 55 | ``` 56 | 57 | ## Column properties 58 | 59 | Every struct member corresponds to one column of your backing table. Here you can 60 | use the `#[sql]` annotation to add metadata.
61 | 62 | [`table`]: https://docs.rs/atmosphere/latest/atmosphere/attr.table.html 63 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | 8 | env: 9 | MINIMUM_LINE_COVERAGE_PERCENT: 0 10 | 11 | jobs: 12 | fmt: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v3 16 | - run: rustup update && rustup component add rustfmt 17 | - run: cargo fmt --check --all 18 | 19 | clippy: 20 | runs-on: ubuntu-latest 21 | steps: 22 | - uses: actions/checkout@v3 23 | - run: rustup update && rustup component add clippy 24 | - uses: Swatinem/rust-cache@v2 25 | - run: cargo clippy --all-targets --workspace --features postgres -- -D warnings -D clippy::all 26 | 27 | # Examples require the `sqlite` feature 28 | clippy-examples: 29 | runs-on: ubuntu-latest 30 | steps: 31 | - uses: actions/checkout@v3 32 | - run: rustup update && rustup component add clippy 33 | - uses: Swatinem/rust-cache@v2 34 | - run: cargo clippy --all-targets --workspace --features sqlite -- -D warnings -D clippy::all 35 | 36 | test: 37 | runs-on: ubuntu-latest 38 | steps: 39 | - uses: actions/checkout@v5 40 | with: 41 | lfs: "true" 42 | - uses: isbang/compose-action@v2 43 | with: 44 | compose-file: "./tests/postgres.yml" 45 | - run: rustup update 46 | - uses: Swatinem/rust-cache@v2 47 | - run: cargo test --workspace --features postgres 48 | env: 49 | RUST_BACKTRACE: 1 50 | DATABASE_URL: postgres://atmosphere:atmosphere@localhost:5432 51 | 52 | typos: 53 | runs-on: ubuntu-latest 54 | steps: 55 | - uses: actions/checkout@v5 56 | - uses: Swatinem/rust-cache@v2 57 | - run: cargo install typos-cli || true 58 | - run: typos 59 | -------------------------------------------------------------------------------- /atmosphere-core/src/error.rs: 
-------------------------------------------------------------------------------- 1 | //! Error Handling Module for Atmosphere 2 | //! 3 | //! This module defines the error handling mechanisms used throughout the Atmosphere framework. It 4 | //! provides a comprehensive `Error` type that encapsulates various kinds of errors that may occur 5 | //! during database operations, file IO, and other framework-related activities. 6 | //! 7 | //! The module simplifies error management by categorizing common error types and providing a 8 | //! unified interface for handling them. This approach enhances code readability and 9 | //! maintainability, especially in scenarios involving complex database interactions and 10 | //! operations. 11 | 12 | use miette::Diagnostic; 13 | use thiserror::Error; 14 | 15 | use crate::{BindError, query::QueryError}; 16 | 17 | /// Errors that can occur within Atmosphere. 18 | /// 19 | /// This enum encapsulates a range of errors including IO errors, query-related errors, binding 20 | /// errors, and others. It is designed to provide a unified error handling mechanism across 21 | /// different components of the framework. 22 | #[derive(Debug, Diagnostic, Error)] 23 | #[non_exhaustive] 24 | pub enum Error { 25 | #[error("io")] 26 | #[diagnostic(code(atmosphere::io))] 27 | Io(#[from] std::io::Error), 28 | 29 | #[error("query")] 30 | #[diagnostic(transparent)] 31 | Query(#[from] QueryError), 32 | 33 | #[error("bind")] 34 | #[diagnostic(transparent)] 35 | Bind(#[from] BindError), 36 | 37 | #[error("other")] 38 | #[diagnostic(code(atmosphere::other))] 39 | Other, 40 | 41 | #[error("internal")] 42 | #[diagnostic(code(atmosphere::internal))] 43 | Internal, 44 | } 45 | 46 | /// A specialized `Result` type for use throughout the Atmosphere framework. 47 | /// 48 | /// This type alias simplifies error handling by using the `Error` enum as the default error type. 
49 | /// It is used as the return type for functions and methods within the framework, where errors are 50 | /// expected to be one of the variants defined in the `Error` enum. 51 | pub type Result = std::result::Result; 52 | -------------------------------------------------------------------------------- /tests/json_attr/non_nullable.rs: -------------------------------------------------------------------------------- 1 | use atmosphere::prelude::*; 2 | use atmosphere_core::Table; 3 | 4 | use super::Data; 5 | 6 | #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] 7 | #[table(name = "with_json_nullable", schema = "public")] 8 | struct WithJson { 9 | #[sql(pk)] 10 | id: i32, 11 | #[sql(json)] 12 | #[sqlx(json)] 13 | data: Data, 14 | } 15 | 16 | #[sqlx::test(migrations = "tests/json_attr/migrations")] 17 | async fn create(pool: sqlx::PgPool) { 18 | atmosphere::testing::create( 19 | &pool, 20 | WithJson { 21 | id: 0, 22 | data: Data::new("Maria Agnesi"), 23 | }, 24 | ) 25 | .await; 26 | 27 | WithJson { 28 | id: 1, 29 | data: Data::new("Elizabeth Garrett Anderson"), 30 | } 31 | .upsert(&pool) 32 | .await 33 | .unwrap(); 34 | } 35 | 36 | #[sqlx::test(migrations = "tests/json_attr/migrations")] 37 | async fn read(pool: sqlx::PgPool) { 38 | atmosphere::testing::read( 39 | &pool, 40 | WithJson { 41 | id: 0, 42 | data: Data::new("Florence Augusta Merriam Bailey"), 43 | }, 44 | ) 45 | .await; 46 | } 47 | 48 | #[sqlx::test(migrations = "tests/json_attr/migrations")] 49 | async fn update(pool: sqlx::PgPool) { 50 | atmosphere::testing::update( 51 | &pool, 52 | WithJson { 53 | id: 0, 54 | data: Data::new("Laura Maria Caterina Bassi"), 55 | }, 56 | vec![ 57 | WithJson { 58 | id: 0, 59 | data: Data::new("Ruth Benerito"), 60 | }, 61 | WithJson { 62 | id: 0, 63 | data: Data::new("Marie Curie"), 64 | }, 65 | ], 66 | ) 67 | .await; 68 | } 69 | 70 | #[sqlx::test(migrations = "tests/json_attr/migrations")] 71 | async fn delete(pool: sqlx::PgPool) { 72 | atmosphere::testing::delete( 
73 | &pool, 74 | WithJson { 75 | id: 0, 76 | data: Data::new("Harriet Brooks"), 77 | }, 78 | ) 79 | .await; 80 | } 81 | -------------------------------------------------------------------------------- /tests/postgis/mod.rs: -------------------------------------------------------------------------------- 1 | use atmosphere::postgis::{Point, Polygon}; 2 | use atmosphere::{Create, Read, Table as _, table}; 3 | use sqlx::{Executor, PgPool}; 4 | 5 | #[derive(Debug, PartialEq)] 6 | #[table(schema = "public", name = "with_point")] 7 | struct WithPoint { 8 | #[sql(pk)] 9 | id: i32, 10 | point: Point, 11 | } 12 | 13 | #[derive(Debug, PartialEq)] 14 | #[table(schema = "public", name = "with_polygon")] 15 | struct WithPolygon { 16 | #[sql(pk)] 17 | id: i32, 18 | polygon: Polygon, 19 | } 20 | 21 | #[sqlx::test] 22 | async fn point_roundtrip(pool: PgPool) { 23 | pool.execute("CREATE EXTENSION postgis").await.unwrap(); 24 | pool.execute( 25 | r#" 26 | CREATE TABLE with_point ( 27 | id INT PRIMARY KEY, 28 | point public.geometry (Point, 4326) 29 | ) 30 | "#, 31 | ) 32 | .await 33 | .unwrap(); 34 | 35 | let mut point = WithPoint { 36 | id: 42, 37 | point: Point::new(4., 2.), 38 | }; 39 | 40 | let result = point.create(&pool).await.unwrap(); 41 | 42 | assert_eq!(result.rows_affected(), 1); 43 | 44 | let from_db = WithPoint::read(&pool, &42).await.unwrap(); 45 | 46 | assert_eq!(point, from_db); 47 | } 48 | 49 | #[sqlx::test] 50 | async fn polygon_roundtrip(pool: PgPool) { 51 | pool.execute("CREATE EXTENSION postgis").await.unwrap(); 52 | pool.execute( 53 | r#" 54 | CREATE TABLE with_polygon ( 55 | id INT PRIMARY KEY, 56 | polygon public.geometry (Polygon, 4326) 57 | ) 58 | "#, 59 | ) 60 | .await 61 | .unwrap(); 62 | 63 | let mut with_polygon = WithPolygon { 64 | id: 42, 65 | polygon: Polygon::from_iter([ 66 | Point::new(0., 0.), 67 | Point::new(1., 0.), 68 | Point::new(1., 1.), 69 | Point::new(0., 1.), 70 | ]), 71 | }; 72 | 73 | let result = 
with_polygon.create(&pool).await.unwrap(); 74 | 75 | assert_eq!(result.rows_affected(), 1); 76 | 77 | let from_db = WithPolygon::read(&pool, &42).await.unwrap(); 78 | 79 | assert_eq!(with_polygon, from_db); 80 | } 81 | -------------------------------------------------------------------------------- /tests/json_attr/nullable.rs: -------------------------------------------------------------------------------- 1 | use atmosphere::prelude::*; 2 | use atmosphere_core::Table; 3 | 4 | use super::Data; 5 | 6 | #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] 7 | #[table(name = "with_json_nullable", schema = "public")] 8 | struct WithJson { 9 | #[sql(pk)] 10 | id: i32, 11 | #[sql(json)] 12 | #[sqlx(json(nullable))] 13 | data: Option, 14 | } 15 | 16 | #[sqlx::test(migrations = "tests/json_attr/migrations")] 17 | async fn create(pool: sqlx::PgPool) { 18 | atmosphere::testing::create( 19 | &pool, 20 | WithJson { 21 | id: 0, 22 | data: Some(Data::new("Maria Agnesi")), 23 | }, 24 | ) 25 | .await; 26 | 27 | atmosphere::testing::create(&pool, WithJson { id: 1, data: None }).await; 28 | 29 | WithJson { 30 | id: 2, 31 | data: Some(Data::new("Elizabeth Garrett Anderson")), 32 | } 33 | .upsert(&pool) 34 | .await 35 | .unwrap(); 36 | 37 | WithJson { id: 3, data: None }.upsert(&pool).await.unwrap(); 38 | } 39 | 40 | #[sqlx::test(migrations = "tests/json_attr/migrations")] 41 | async fn read_some(pool: sqlx::PgPool) { 42 | atmosphere::testing::read( 43 | &pool, 44 | WithJson { 45 | id: 0, 46 | data: Some(Data::new("Florence Augusta Merriam Bailey")), 47 | }, 48 | ) 49 | .await; 50 | } 51 | 52 | #[sqlx::test(migrations = "tests/json_attr/migrations")] 53 | async fn read_none(pool: sqlx::PgPool) { 54 | atmosphere::testing::read(&pool, WithJson { id: 1, data: None }).await; 55 | } 56 | 57 | #[sqlx::test(migrations = "tests/json_attr/migrations")] 58 | async fn update(pool: sqlx::PgPool) { 59 | atmosphere::testing::update( 60 | &pool, 61 | WithJson { 62 | id: 0, 63 | data: 
Some(Data::new("Laura Maria Caterina Bassi")), 64 | }, 65 | vec![ 66 | WithJson { id: 0, data: None }, 67 | WithJson { 68 | id: 0, 69 | data: Some(Data::new("Ruth Benerito")), 70 | }, 71 | WithJson { id: 0, data: None }, 72 | ], 73 | ) 74 | .await; 75 | } 76 | 77 | #[sqlx::test(migrations = "tests/json_attr/migrations")] 78 | async fn delete(pool: sqlx::PgPool) { 79 | atmosphere::testing::delete( 80 | &pool, 81 | WithJson { 82 | id: 0, 83 | data: Some(Data::new("Harriet Brooks")), 84 | }, 85 | ) 86 | .await; 87 | 88 | atmosphere::testing::delete(&pool, WithJson { id: 1, data: None }).await; 89 | } 90 | -------------------------------------------------------------------------------- /atmosphere-core/src/schema/create.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | Bind, Error, Result, 3 | hooks::{self, HookInput, HookStage, Hooks}, 4 | query::{QueryError, QueryResult}, 5 | schema::Table, 6 | }; 7 | 8 | use async_trait::async_trait; 9 | use sqlx::{Executor, IntoArguments, database::Database}; 10 | 11 | /// Trait for creating rows in a database. 12 | /// 13 | /// This trait provides the functionality to create new rows in a table represented by a struct implementing 14 | /// `Table`, `Bind`, and `Hooks`. It defines an asynchronous method for inserting a new row into the database 15 | /// using a given executor. The trait ensures that all necessary hooks are executed at the appropriate stages 16 | /// of the operation. 17 | #[async_trait] 18 | pub trait Create: Table + Bind + Hooks + Sync + 'static { 19 | /// Creates a new row in the database. This method builds the SQL insert query, 20 | /// binds the necessary values, executes the query, and triggers the relevant hooks at different stages 21 | /// (pre-binding and post-execution). 
22 | async fn create<'e, E>( 23 | &mut self, 24 | executor: E, 25 | ) -> Result<::QueryResult> 26 | where 27 | E: Executor<'e, Database = crate::Driver>, 28 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send; 29 | } 30 | 31 | #[async_trait] 32 | impl Create for T 33 | where 34 | T: Table + Bind + Hooks + Sync + 'static, 35 | { 36 | async fn create<'e, E>( 37 | &mut self, 38 | executor: E, 39 | ) -> Result<::QueryResult> 40 | where 41 | E: Executor<'e, Database = crate::Driver>, 42 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 43 | { 44 | let query = crate::runtime::sql::insert::(); 45 | 46 | hooks::execute(HookStage::PreBind, &query, HookInput::Row(self)).await?; 47 | 48 | let mut builder = sqlx::query(query.sql()); 49 | 50 | for c in query.bindings().columns() { 51 | builder = self.bind(c, builder).unwrap(); 52 | } 53 | 54 | let res = builder 55 | .persistent(false) 56 | .execute(executor) 57 | .await 58 | .map_err(QueryError::from) 59 | .map_err(Error::Query); 60 | 61 | hooks::execute( 62 | HookStage::PostExec, 63 | &query, 64 | QueryResult::Execution(&res).into(), 65 | ) 66 | .await?; 67 | 68 | res 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /atmosphere-macros/src/derive/bindings.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::TokenStream; 2 | use quote::quote; 3 | use syn::Ident; 4 | 5 | use crate::schema::table::Table; 6 | 7 | pub fn bindings(table: &Table) -> TokenStream { 8 | let col = Ident::new("col", proc_macro2::Span::call_site()); 9 | let query = Ident::new("query", proc_macro2::Span::call_site()); 10 | 11 | let mut binds = TokenStream::new(); 12 | 13 | { 14 | let field = &table.primary_key.name.field(); 15 | 16 | binds.extend(quote!( 17 | if #col.field() == Self::PRIMARY_KEY.field { 18 | use ::atmosphere::Bindable; 19 | return Ok(#query.dyn_bind(&self.#field)); 20 | } 21 | )); 22 | } 23 | 24 | for fk in 
&table.foreign_keys { 25 | let field = fk.name.field(); 26 | 27 | binds.extend(quote!( 28 | if #col.field() == stringify!(#field) { 29 | use ::atmosphere::Bindable; 30 | return Ok(#query.dyn_bind(&self.#field)); 31 | } 32 | )); 33 | } 34 | 35 | for data in &table.data_columns { 36 | let field = data.name.field(); 37 | 38 | if data.modifiers.json { 39 | binds.extend(quote!( 40 | if #col.field() == stringify!(#field) { 41 | use ::atmosphere::Bindable; 42 | use ::atmosphere::sqlx::types::Json; 43 | return Ok(#query.dyn_bind(Json(&self.#field))); 44 | } 45 | )); 46 | } else { 47 | binds.extend(quote!( 48 | if #col.field() == stringify!(#field) { 49 | use ::atmosphere::Bindable; 50 | return Ok(#query.dyn_bind(&self.#field)); 51 | } 52 | )); 53 | } 54 | } 55 | 56 | for ts in &table.timestamp_columns { 57 | let field = ts.name.field(); 58 | 59 | binds.extend(quote!( 60 | if #col.field() == stringify!(#field) { 61 | use ::atmosphere::Bindable; 62 | return Ok(#query.dyn_bind(&self.#field)); 63 | } 64 | )); 65 | } 66 | 67 | let ident = &table.ident; 68 | 69 | quote!( 70 | #[automatically_derived] 71 | impl ::atmosphere::Bind for #ident { 72 | fn bind< 73 | 'q, 74 | Q: ::atmosphere::Bindable<'q> 75 | >( 76 | &'q self, 77 | #col: &'q ::atmosphere::Column, 78 | #query: Q 79 | ) -> ::atmosphere::Result { 80 | #binds 81 | 82 | Err(::atmosphere::Error::Bind( 83 | ::atmosphere::bind::BindError::Unknown(#col.field()) 84 | )) 85 | } 86 | } 87 | ) 88 | } 89 | -------------------------------------------------------------------------------- /docs/src/getting-started/queries.md: -------------------------------------------------------------------------------- 1 | # Queries 2 | 3 | When using Atmosphere, you have two options for writing queries. Once you 4 | annotate your entities with `table`, it gives you the ability to use querying 5 | traits that Atmosphere comes with. 
However, you can at any point reach down 6 | and write your queries in raw SQL, the way you would if you used `sqlx` 7 | directly. 8 | 9 | ## Using Atmosphere Traits 10 | 11 | Given the Schema from the section before, here are some examples that show how 12 | Atmosphere creates traits that allow for simple operations on the tables. 13 | 14 | ```rust 15 | # extern crate atmosphere; 16 | # extern crate sqlx; 17 | # extern crate tokio; 18 | # use atmosphere::prelude::*; 19 | # #[derive(Debug, PartialEq)] 20 | # #[table(schema = "public", name = "user")] 21 | # struct User { 22 | # #[sql(pk)] 23 | # id: i32, 24 | # name: String, 25 | # #[sql(unique)] 26 | # email: String, 27 | # } 28 | # #[derive(Debug, PartialEq)] 29 | # #[table(schema = "public", name = "post")] 30 | # struct Post { 31 | # #[sql(pk)] 32 | # id: i32, 33 | # #[sql(fk -> User, rename = "author_id")] 34 | # author: i32, 35 | # #[sql(unique)] 36 | # title: String, 37 | # } 38 | # async fn test() -> std::result::Result<(), Box> { 39 | let database = std::env::var("DATABASE_URL").unwrap(); 40 | let pool = atmosphere::Pool::connect(&database).await?; 41 | 42 | let mut user = User { 43 | id: 0, 44 | name: "demo".to_owned(), 45 | email: "some@email.com".to_owned(), 46 | }; 47 | 48 | user.save(&pool).await?; 49 | user.delete(&pool).await?; 50 | user.create(&pool).await?; 51 | 52 | assert_eq!( 53 | User::read(&pool, &0).await?, 54 | User::find_by_email(&pool, &"some@email.com".to_string()).await?.unwrap() 55 | ); 56 | 57 | let mut post = Post { 58 | id: 0, 59 | author: 0, 60 | title: "test".to_owned() 61 | }; 62 | 63 | post.save(&pool).await?; 64 | 65 | Post::find_by_author(&pool, &0).await?; 66 | 67 | // Inter-Table Operations 68 | 69 | Post { id: 1, author: 0, title: "test1".to_owned() } 70 | .author(&pool).await?; 71 | 72 | user.posts(&pool).await?; 73 | user.delete_posts(&pool).await?; 74 | # Ok(()) 75 | # } 76 | # fn main() {} 77 | ``` 78 | 79 | ## Using raw SQL 80 | 81 | As previously explained, it is
always possible to reach down and perform raw SQL 82 | queries on an Atmosphere pool, since it is just an alias for an `sqlx` one. 83 | 84 | ```rust 85 | # extern crate atmosphere; 86 | # extern crate sqlx; 87 | # extern crate tokio; 88 | # use atmosphere::prelude::*; 89 | # async fn test() -> std::result::Result<(), Box> { 90 | let database = std::env::var("DATABASE_URL").unwrap(); 91 | let pool = atmosphere::Pool::connect(&database).await?; 92 | 93 | sqlx::query("DROP TABLE foo;") 94 | .execute(&pool).await?; 95 | # Ok(()) 96 | # } 97 | # fn main() {} 98 | ``` 99 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [".", "atmosphere-core", "atmosphere-macros", "atmosphere-extras"] 3 | 4 | [workspace.package] 5 | version = "0.4.1" 6 | license = "Apache-2.0" 7 | edition = "2024" 8 | exclude = ["/.github", "/tests"] 9 | authors = [ 10 | "Florian Eich ", 11 | "Mara Schulke ", 12 | "Nicolas Vizzari-Trinquier ", 13 | ] 14 | description = "A lightweight SQL framework for sustainable database reliant systems" 15 | repository = "https://github.com/helsing-ai/atmosphere" 16 | keywords = ["sqlx", "postgres", "database", "orm", "backend"] 17 | 18 | [workspace.dependencies] 19 | atmosphere-core = { version = "=0.4.1", path = "atmosphere-core" } 20 | atmosphere-extras = { version = "=0.4.1", path = "atmosphere-extras" } 21 | atmosphere-macros = { version = "=0.4.1", path = "atmosphere-macros" } 22 | async-trait = "0.1" 23 | lazy_static = "1" 24 | sqlx = { version = "0.8", default-features = false, features = ["chrono"] } 25 | thiserror = "2" 26 | serde = "1" 27 | 28 | [package] 29 | name = "atmosphere" 30 | readme = "README.md" 31 | description = "A lightweight SQL framework for sustainable database reliant systems" 32 | documentation = "https://docs.rs/atmosphere" 33 | version.workspace = true 34 | license.workspace = true 
35 | edition.workspace = true 36 | authors.workspace = true 37 | repository.workspace = true 38 | 39 | [dependencies] 40 | atmosphere-core.workspace = true 41 | atmosphere-extras.workspace = true 42 | atmosphere-macros.workspace = true 43 | async-trait.workspace = true 44 | sqlx.workspace = true 45 | 46 | [features] 47 | default = [] 48 | mysql = ["atmosphere-core/mysql", "atmosphere-macros/mysql"] 49 | postgres = ["atmosphere-core/postgres", "atmosphere-macros/postgres"] 50 | sqlite = ["atmosphere-core/sqlite", "atmosphere-macros/sqlite"] 51 | postgis = ["postgres", "atmosphere-extras/postgis"] 52 | serde = ["atmosphere-extras/serde"] 53 | 54 | [dev-dependencies] 55 | sqlx = { version = "0.8", features = [ 56 | "runtime-tokio-rustls", 57 | "any", 58 | "sqlite", 59 | "mysql", 60 | "postgres", 61 | ] } 62 | tokio = { version = "1", features = ["macros", "rt-multi-thread"] } 63 | tokio-test = "0" 64 | serde = { workspace = true, features = ["derive"] } 65 | 66 | [[example]] 67 | name = "forest" 68 | path = "examples/forest/main.rs" 69 | required-features = ["sqlite"] 70 | 71 | [[example]] 72 | name = "blog" 73 | path = "examples/blog/main.rs" 74 | required-features = ["sqlite"] 75 | 76 | [[test]] 77 | name = "integration" 78 | path = "tests/lib.rs" 79 | test = true 80 | required-features = ["postgres"] 81 | 82 | [package.metadata.docs.rs] 83 | features = ["postgres"] 84 | 85 | [workspace.lints.clippy] 86 | expect_used = "warn" 87 | pedantic = { level = "warn", priority = -1 } 88 | undocumented_unsafe_blocks = "deny" 89 | unwrap_used = "warn" 90 | wildcard_imports = "warn" 91 | -------------------------------------------------------------------------------- /atmosphere-macros/src/derive/queries/unique.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::{Span, TokenStream}; 2 | use quote::quote; 3 | use syn::Ident; 4 | 5 | use crate::schema::{column::Column, table::Table}; 6 | 7 | pub fn queries(table: &Table) -> 
TokenStream { 8 | let mut stream = TokenStream::new(); 9 | 10 | let ident = &table.ident; 11 | 12 | let fks: Vec = table 13 | .foreign_keys 14 | .iter() 15 | .filter(|fk| fk.modifiers.unique) 16 | .cloned() 17 | .map(Column::ForeignKey) 18 | .collect(); 19 | 20 | let data: Vec = table 21 | .data_columns 22 | .iter() 23 | .filter(|data| data.modifiers.unique) 24 | .cloned() 25 | .map(Column::Data) 26 | .collect(); 27 | 28 | for column in fks.iter().chain(data.iter()) { 29 | let ty = column.ty(); 30 | let col = column.name().field().to_string().to_lowercase(); 31 | let column = column.quote(); 32 | 33 | let find_by_col = Ident::new(&format!("find_by_{col}"), Span::mixed_site()); 34 | let delete_by_col = Ident::new(&format!("delete_by_{col}"), Span::mixed_site()); 35 | 36 | stream.extend(quote!( 37 | #[automatically_derived] 38 | impl #ident { 39 | pub async fn #find_by_col<'e, E>( 40 | executor: E, 41 | value: &#ty, 42 | ) -> ::atmosphere::Result> 43 | where 44 | E: ::atmosphere::sqlx::Executor<'e, Database = ::atmosphere::Driver>, 45 | for<'q> <::atmosphere::Driver as ::atmosphere::sqlx::database::Database>::Arguments<'q>: 46 | ::atmosphere::sqlx::IntoArguments<'q, ::atmosphere::Driver> + Send 47 | { 48 | use ::atmosphere::{ 49 | query::{Query, QueryError}, 50 | runtime::sql, 51 | Error 52 | }; 53 | 54 | const COLUMN: ::atmosphere::Column<#ident> = #column.as_col(); 55 | 56 | let query = sql::select_by::<#ident>(COLUMN.clone()); 57 | 58 | ::atmosphere::sqlx::query_as(query.sql()) 59 | .bind(value) 60 | .persistent(false) 61 | .fetch_optional(executor) 62 | .await 63 | .map_err(QueryError::from) 64 | .map_err(Error::Query) 65 | } 66 | 67 | pub async fn #delete_by_col<'e, E>( 68 | executor: E, 69 | value: &#ty, 70 | ) -> ::atmosphere::Result<<::atmosphere::Driver as ::atmosphere::sqlx::Database>::QueryResult> 71 | where 72 | E: ::atmosphere::sqlx::Executor<'e, Database = ::atmosphere::Driver>, 73 | for<'q> <::atmosphere::Driver as 
::atmosphere::sqlx::database::Database>::Arguments<'q>: 74 | ::atmosphere::sqlx::IntoArguments<'q, ::atmosphere::Driver> + Send 75 | { 76 | use ::atmosphere::{ 77 | query::{Query, QueryError}, 78 | runtime::sql, 79 | Error 80 | }; 81 | 82 | const COLUMN: ::atmosphere::Column<#ident> = #column.as_col(); 83 | 84 | let query = sql::delete_by::<#ident>(COLUMN.clone()); 85 | 86 | ::atmosphere::sqlx::query(query.sql()) 87 | .bind(value) 88 | .persistent(false) 89 | .execute(executor) 90 | .await 91 | .map_err(QueryError::from) 92 | .map_err(Error::Query) 93 | } 94 | } 95 | )) 96 | } 97 | 98 | stream 99 | } 100 | -------------------------------------------------------------------------------- /tests/db/crud.rs: -------------------------------------------------------------------------------- 1 | use atmosphere::prelude::*; 2 | use atmosphere_core::Table; 3 | 4 | #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] 5 | #[table(name = "forest", schema = "public")] 6 | struct Forest { 7 | #[sql(pk)] 8 | id: i32, 9 | name: String, 10 | location: String, 11 | } 12 | 13 | #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] 14 | #[table(name = "tree", schema = "public")] 15 | struct Tree { 16 | #[sql(pk)] 17 | id: i32, 18 | #[sql(fk -> Forest, rename = "forest_id")] 19 | forest: i32, 20 | } 21 | 22 | #[sqlx::test(migrations = "tests/db/migrations")] 23 | async fn create(pool: sqlx::PgPool) { 24 | atmosphere::testing::create( 25 | &pool, 26 | Forest { 27 | id: 0, 28 | name: "grunewald".to_owned(), 29 | location: "berlin".to_owned(), 30 | }, 31 | ) 32 | .await; 33 | 34 | Forest { 35 | id: 99, 36 | name: "place".to_owned(), 37 | location: "holder".to_owned(), 38 | } 39 | .upsert(&pool) 40 | .await 41 | .unwrap(); 42 | 43 | atmosphere::testing::create(&pool, Tree { id: 0, forest: 99 }).await; 44 | } 45 | 46 | #[sqlx::test(migrations = "tests/db/migrations")] 47 | async fn read(pool: sqlx::PgPool) { 48 | atmosphere::testing::read( 49 | &pool, 50 | Forest { 51 | id: 0, 52 | 
name: "grunewald".to_owned(), 53 | location: "berlin".to_owned(), 54 | }, 55 | ) 56 | .await; 57 | 58 | Forest { 59 | id: 99, 60 | name: "place".to_owned(), 61 | location: "holder".to_owned(), 62 | } 63 | .upsert(&pool) 64 | .await 65 | .unwrap(); 66 | 67 | atmosphere::testing::read(&pool, Tree { id: 0, forest: 99 }).await; 68 | } 69 | 70 | #[sqlx::test(migrations = "tests/db/migrations")] 71 | async fn update(pool: sqlx::PgPool) { 72 | atmosphere::testing::update( 73 | &pool, 74 | Forest { 75 | id: 0, 76 | name: "grunewald".to_owned(), 77 | location: "berlin".to_owned(), 78 | }, 79 | vec![ 80 | Forest { 81 | id: 0, 82 | name: "gruneeeeeeeewald".to_owned(), 83 | location: "berlin".to_owned(), 84 | }, 85 | Forest { 86 | id: 0, 87 | name: "grunewald".to_owned(), 88 | location: "berlin, germany".to_owned(), 89 | }, 90 | Forest { 91 | id: 0, 92 | name: "englischer garten".to_owned(), 93 | location: "münchen".to_owned(), 94 | }, 95 | ], 96 | ) 97 | .await; 98 | 99 | Forest { 100 | id: 99, 101 | name: "place".to_owned(), 102 | location: "holder".to_owned(), 103 | } 104 | .upsert(&pool) 105 | .await 106 | .unwrap(); 107 | 108 | Forest { 109 | id: 100, 110 | name: "place".to_owned(), 111 | location: "holder".to_owned(), 112 | } 113 | .upsert(&pool) 114 | .await 115 | .unwrap(); 116 | 117 | atmosphere::testing::update( 118 | &pool, 119 | Tree { id: 0, forest: 99 }, 120 | vec![Tree { id: 0, forest: 100 }, Tree { id: 0, forest: 99 }], 121 | ) 122 | .await; 123 | } 124 | 125 | #[sqlx::test(migrations = "tests/db/migrations")] 126 | async fn delete(pool: sqlx::PgPool) { 127 | atmosphere::testing::delete( 128 | &pool, 129 | Forest { 130 | id: 0, 131 | name: "grunewald".to_owned(), 132 | location: "berlin".to_owned(), 133 | }, 134 | ) 135 | .await; 136 | 137 | Forest { 138 | id: 99, 139 | name: "place".to_owned(), 140 | location: "holder".to_owned(), 141 | } 142 | .upsert(&pool) 143 | .await 144 | .unwrap(); 145 | 146 | atmosphere::testing::delete(&pool, Tree { id: 0, forest: 99 
}).await; 147 | } 148 | -------------------------------------------------------------------------------- /atmosphere-core/src/hooks/mod.rs: -------------------------------------------------------------------------------- 1 | //! Atmosphere Hook System 2 | //! 3 | //! This module provides a system for defining and applying hooks at various stages of query 4 | //! execution. Hooks allow for custom logic to be executed at predetermined points in the query 5 | //! lifecycle, such as before binding, before execution, and after execution. This functionality is 6 | //! essential for implementing side effects, validations, or augmentations to the standard query 7 | //! process. 8 | //! 9 | //! # Concepts 10 | //! 11 | //! - `HookStage`: An enum representing different stages in the query lifecycle where hooks can be applied. 12 | //! - `HookInput`: An enum representing different types of input that can be provided to hooks. 13 | //! - `Hook`: A trait defining a hook with a specific stage and an application method. 14 | //! - `Hooks`: A trait for associating a set of hooks with a table entity. 15 | //! - `execute`: A function to execute the appropriate hooks for a given stage and context. 16 | //! 17 | //! The hooks system is a powerful tool for extending and customizing the behavior of database operations, 18 | //! enabling developers to embed additional logic seamlessly within the query execution flow. 19 | 20 | use async_trait::async_trait; 21 | 22 | use crate::{ 23 | Bind, Result, Table, 24 | query::{Query, QueryResult}, 25 | }; 26 | 27 | /// Enumerates different stages in the query lifecycle for hook application. 28 | #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] 29 | pub enum HookStage { 30 | /// Represents the stage before query parameters are bound. 31 | PreBind, 32 | /// Indicates the stage before query execution. 33 | PreExec, 34 | /// Denotes the stage after the query has been executed. 
35 | PostExec, 36 | } 37 | 38 | /// Represents different types of input that can be provided to hooks. 39 | pub enum HookInput<'t, T: Table + Bind> { 40 | /// No input is provided to the hook. 41 | None, 42 | /// A mutable reference to a table row entity. 43 | Row(&'t mut T), 44 | /// A reference to the primary key of a table entity. 45 | PrimaryKey(&'t T::PrimaryKey), 46 | /// The result of a query operation. 47 | QueryResult(QueryResult<'t, T>), 48 | } 49 | 50 | impl<'t, T: Table + Bind> From<QueryResult<'t, T>> for HookInput<'t, T> { 51 | fn from(value: QueryResult<'t, T>) -> Self { 52 | Self::QueryResult(value) 53 | } 54 | } 55 | 56 | /// A trait defining a hook for query execution. 57 | /// 58 | /// Implementors of this trait can define custom logic to be executed at a specific stage of the 59 | /// query lifecycle. The trait provides a method to specify the stage at which the hook should be 60 | /// applied and another method to implement the hook's logic. 61 | #[async_trait] 62 | pub trait Hook<T: Table + Bind>: Sync + Send { 63 | /// Returns the stage at which the hook should be applied. 64 | fn stage(&self) -> HookStage; 65 | 66 | /// Asynchronously applies the hook logic to a given query context and input. 67 | async fn apply(&self, ctx: &Query<T>, input: &mut HookInput<'_, T>) -> Result<()> { 68 | let _ = ctx; 69 | let _ = input; 70 | Ok(()) 71 | } 72 | } 73 | 74 | /// A trait for associating a set of hooks with a table entity. 75 | /// 76 | /// Implementors can define a static array of hooks that are associated with a table entity. These 77 | /// hooks are invoked at their respective stages during the query execution process, enabling 78 | /// custom behaviors or validations. 79 | pub trait Hooks: Table + Bind { 80 | /// A static array of references to hooks associated with the implementing table entity. 
81 | const HOOKS: &'static [&'static dyn Hook<Self>]; 82 | } 83 | 84 | pub(crate) async fn execute<T: Hooks>( 85 | stage: HookStage, 86 | ctx: &Query<T>, 87 | mut input: HookInput<'_, T>, 88 | ) -> Result<()> { 89 | for hook in T::HOOKS { 90 | if hook.stage() != stage { 91 | continue; 92 | } 93 | 94 | hook.apply(ctx, &mut input).await?; 95 | } 96 | 97 | Ok(()) 98 | } 99 | -------------------------------------------------------------------------------- /atmosphere-core/src/bind.rs: -------------------------------------------------------------------------------- 1 | //! Bind Module for Atmosphere SQL Framework 2 | //! 3 | //! This module provides functionality to bind values to SQL queries in a type-safe and efficient 4 | //! manner. It includes traits and implementations that facilitate the binding of parameters to 5 | //! various SQL query types, ensuring that the queries are correctly formatted and executed against 6 | //! the database. 7 | //! 8 | //! Key components of this module include the `Bindable` trait, which abstracts over different 9 | //! types of queries, allowing for flexible and dynamic binding of values, and the `Bind` trait, 10 | //! which provides an interface for binding columns to SQL queries in the context of a specific 11 | //! table. 12 | //! 13 | //! # Types 14 | //! 15 | //! - `BindError`: An error related to binding operations, such as unknown column errors. 16 | //! - `Bindable`: A trait for abstracting over different query types, providing a method to dynamically bind values. 17 | //! - `Bind`: A trait for binding columns to SQL queries, specific to table entities. 18 | //! 19 | //! The module plays a crucial role in the framework, enabling developers to write database 20 | //! interactions that are both expressive and resilient to errors like incorrect parameter types or 21 | //! missing values. 
22 | 23 | use crate::{Column, Result, Table}; 24 | use miette::Diagnostic; 25 | use sqlx::database::Database; 26 | use sqlx::query::QueryAs; 27 | use sqlx::{Encode, QueryBuilder, Type}; 28 | use thiserror::Error; 29 | 30 | /// Enumerates errors that can occur during the binding of values to SQL queries. 31 | /// 32 | /// This enum covers various issues that might arise when binding parameters, such as referencing 33 | /// unknown columns. 34 | #[derive(Debug, Diagnostic, Error)] 35 | #[non_exhaustive] 36 | pub enum BindError { 37 | /// Represents an error where a specified column is unknown or not found. 38 | #[error("unknown column: {0}")] 39 | #[diagnostic(code(atmosphere::bind::unknown))] 40 | Unknown(&'static str), 41 | } 42 | 43 | type Query<'q, DB> = sqlx::query::Query<'q, DB, <DB as Database>::Arguments<'q>>; 44 | 45 | /// Trait for dynamic binding of values. 46 | /// 47 | /// `Bindable` provides an abstraction over different types of SQL queries, such as 48 | /// `sqlx::query::Query` and `sqlx::query::QueryAs`, allowing for flexible and dynamic binding of 49 | /// values. It is designed to work with various query types and enables the binding of values with 50 | /// different types and constraints. 51 | pub trait Bindable<'q> { 52 | /// Binds a value to the query. The value must be compatible with the `atmosphere::Driver`. 
53 | fn dyn_bind<T: 'q + Encode<'q, crate::Driver> + Type<crate::Driver>>( 54 | self, 55 | value: T, 56 | ) -> Self; 57 | } 58 | 59 | impl<'q> Bindable<'q> for Query<'q, crate::Driver> { 60 | fn dyn_bind<T: 'q + Encode<'q, crate::Driver> + Type<crate::Driver>>( 61 | self, 62 | value: T, 63 | ) -> Self { 64 | self.bind(value) 65 | } 66 | } 67 | 68 | impl<'q, E> Bindable<'q> 69 | for QueryAs<'q, crate::Driver, E, <crate::Driver as Database>::Arguments<'q>> 70 | { 71 | fn dyn_bind<T: 'q + Encode<'q, crate::Driver> + Type<crate::Driver>>( 72 | self, 73 | value: T, 74 | ) -> Self { 75 | self.bind(value) 76 | } 77 | } 78 | 79 | impl<'q> Bindable<'q> for QueryBuilder<'q, crate::Driver> { 80 | fn dyn_bind<T: 'q + Encode<'q, crate::Driver> + Type<crate::Driver>>( 81 | mut self, 82 | value: T, 83 | ) -> Self { 84 | self.push_bind(value); 85 | self 86 | } 87 | } 88 | 89 | /// Trait for binding columns to SQL queries in the context of a specific table. 90 | /// 91 | /// This trait should be implemented by table entities to enable the binding of their columns to 92 | /// SQL queries. It provides a method to bind a single column, ensuring that the query correctly 93 | /// reflects the structure and constraints of the table. 94 | pub trait Bind: Table { 95 | /// Binds a single column of the implementing table entity to a given query. 96 | fn bind<'q, Q: Bindable<'q>>(&'q self, c: &'q Column<Self>, query: Q) -> Result<Q>; 97 | } 98 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Setup for testing Atmosphere"; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11"; 6 | flake-parts.url = "github:hercules-ci/flake-parts"; 7 | systems.url = "github:nix-systems/default"; 8 | process-compose-flake.url = "github:Platonic-Systems/process-compose-flake"; 9 | services-flake.url = "github:juspay/services-flake"; 10 | rust-overlay = { 11 | inputs.nixpkgs.follows = "nixpkgs"; 12 | url = "github:oxalica/rust-overlay"; 13 | }; 14 | }; 15 | 16 | outputs = 17 | inputs@{ rust-overlay, nixpkgs, ... 
}: 18 | inputs.flake-parts.lib.mkFlake { inherit inputs; } { 19 | systems = import inputs.systems; 20 | 21 | imports = [ 22 | inputs.process-compose-flake.flakeModule 23 | ]; 24 | 25 | perSystem = 26 | { 27 | self', 28 | system, 29 | lib, 30 | ... 31 | }: 32 | let 33 | overlays = [ (import rust-overlay) ]; 34 | pkgs = import nixpkgs { 35 | inherit system overlays; 36 | }; 37 | dbName = "atmosphere"; 38 | mySqlPort = 3310; 39 | ifDarwin = lib.optionals pkgs.stdenv.isDarwin; 40 | in 41 | { 42 | apps.lint.program = pkgs.writeShellApplication { 43 | name = "cargo.lint"; 44 | runtimeInputs = 45 | with pkgs; 46 | [ 47 | rust-bin.stable.latest.default 48 | typos 49 | ] 50 | ++ ifDarwin [ 51 | darwin.apple_sdk.frameworks.SystemConfiguration 52 | ]; 53 | text = '' 54 | set -eux 55 | 56 | cargo fmt --all 57 | cargo clippy 58 | typos 59 | ''; 60 | }; 61 | 62 | process-compose."default" = 63 | { config, ... }: 64 | { 65 | imports = [ 66 | inputs.services-flake.processComposeModules.default 67 | ]; 68 | 69 | settings.processes.test = 70 | let 71 | pgconf = config.services.postgres.pg; 72 | in 73 | { 74 | command = pkgs.writeShellApplication { 75 | name = "cargo.test.postgres"; 76 | runtimeInputs = 77 | with pkgs; 78 | [ 79 | rust-bin.stable.latest.default 80 | ] 81 | ++ ifDarwin [ 82 | darwin.apple_sdk.frameworks.SystemConfiguration 83 | ]; 84 | text = '' 85 | set -eux 86 | 87 | mkdir -p data 88 | DATABASE_URL=${pgconf.connectionURI { inherit dbName; }} cargo test -F postgres 89 | DATABASE_URL=localhost:${toString mySqlPort} cargo test -F mysql 90 | DATABASE_URL=data/test.db cargo test -F sqlite 91 | ''; 92 | }; 93 | 94 | depends_on = { 95 | "pg".condition = "process_healthy"; 96 | "mysql".condition = "process_healthy"; 97 | }; 98 | }; 99 | 100 | services.postgres."pg" = { 101 | enable = true; 102 | initialDatabases = [ 103 | { name = dbName; } 104 | ]; 105 | }; 106 | 107 | services.mysql."mysql" = { 108 | enable = true; 109 | initialDatabases = [ 110 | { name = dbName; } 111 | 
]; 112 | settings.mysqld.port = mySqlPort; 113 | }; 114 | 115 | # avoid both processes trying to create `data` directory at the same time 116 | settings.processes."mysql-configure".depends_on."pg-init".condition = 117 | "process_completed_successfully"; 118 | }; 119 | }; 120 | }; 121 | } 122 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-parts": { 4 | "inputs": { 5 | "nixpkgs-lib": "nixpkgs-lib" 6 | }, 7 | "locked": { 8 | "lastModified": 1712014858, 9 | "narHash": "sha256-sB4SWl2lX95bExY2gMFG5HIzvva5AVMJd4Igm+GpZNw=", 10 | "owner": "hercules-ci", 11 | "repo": "flake-parts", 12 | "rev": "9126214d0a59633752a136528f5f3b9aa8565b7d", 13 | "type": "github" 14 | }, 15 | "original": { 16 | "owner": "hercules-ci", 17 | "repo": "flake-parts", 18 | "type": "github" 19 | } 20 | }, 21 | "nixpkgs": { 22 | "locked": { 23 | "lastModified": 1743703532, 24 | "narHash": "sha256-s1KLDALEeqy+ttrvqV3jx9mBZEvmthQErTVOAzbjHZs=", 25 | "owner": "NixOS", 26 | "repo": "nixpkgs", 27 | "rev": "bdb91860de2f719b57eef819b5617762f7120c70", 28 | "type": "github" 29 | }, 30 | "original": { 31 | "owner": "NixOS", 32 | "ref": "nixos-24.11", 33 | "repo": "nixpkgs", 34 | "type": "github" 35 | } 36 | }, 37 | "nixpkgs-lib": { 38 | "locked": { 39 | "dir": "lib", 40 | "lastModified": 1711703276, 41 | "narHash": "sha256-iMUFArF0WCatKK6RzfUJknjem0H9m4KgorO/p3Dopkk=", 42 | "owner": "NixOS", 43 | "repo": "nixpkgs", 44 | "rev": "d8fe5e6c92d0d190646fb9f1056741a229980089", 45 | "type": "github" 46 | }, 47 | "original": { 48 | "dir": "lib", 49 | "owner": "NixOS", 50 | "ref": "nixos-unstable", 51 | "repo": "nixpkgs", 52 | "type": "github" 53 | } 54 | }, 55 | "process-compose-flake": { 56 | "locked": { 57 | "lastModified": 1713920695, 58 | "narHash": "sha256-pQIg3wrNBDdRiuhcVC8DFmTXK8GHtR+iV+5Gvsozx5s=", 59 | "owner": "Platonic-Systems", 60 | "repo": 
"process-compose-flake", 61 | "rev": "ee8cd505f08f6cd691930e70987a306b7726851b", 62 | "type": "github" 63 | }, 64 | "original": { 65 | "owner": "Platonic-Systems", 66 | "repo": "process-compose-flake", 67 | "type": "github" 68 | } 69 | }, 70 | "root": { 71 | "inputs": { 72 | "flake-parts": "flake-parts", 73 | "nixpkgs": "nixpkgs", 74 | "process-compose-flake": "process-compose-flake", 75 | "rust-overlay": "rust-overlay", 76 | "services-flake": "services-flake", 77 | "systems": "systems" 78 | } 79 | }, 80 | "rust-overlay": { 81 | "inputs": { 82 | "nixpkgs": [ 83 | "nixpkgs" 84 | ] 85 | }, 86 | "locked": { 87 | "lastModified": 1743820323, 88 | "narHash": "sha256-UXxJogXhPhBFaX4uxmMudcD/x3sEGFtoSc4busTcftY=", 89 | "owner": "oxalica", 90 | "repo": "rust-overlay", 91 | "rev": "b4734ce867252f92cdc7d25f8cc3b7cef153e703", 92 | "type": "github" 93 | }, 94 | "original": { 95 | "owner": "oxalica", 96 | "repo": "rust-overlay", 97 | "type": "github" 98 | } 99 | }, 100 | "services-flake": { 101 | "locked": { 102 | "lastModified": 1713781751, 103 | "narHash": "sha256-83fde6KOfQfGhFg/HAyBsENg+gxq89sog/DvFXOceoU=", 104 | "owner": "juspay", 105 | "repo": "services-flake", 106 | "rev": "4a48d4a7b9a6144a48239bf62a278ca922df98e1", 107 | "type": "github" 108 | }, 109 | "original": { 110 | "owner": "juspay", 111 | "repo": "services-flake", 112 | "type": "github" 113 | } 114 | }, 115 | "systems": { 116 | "locked": { 117 | "lastModified": 1681028828, 118 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 119 | "owner": "nix-systems", 120 | "repo": "default", 121 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 122 | "type": "github" 123 | }, 124 | "original": { 125 | "owner": "nix-systems", 126 | "repo": "default", 127 | "type": "github" 128 | } 129 | } 130 | }, 131 | "root": "root", 132 | "version": 7 133 | } 134 | -------------------------------------------------------------------------------- /atmosphere-core/src/lib.rs: 
-------------------------------------------------------------------------------- 1 | //! # `🌍 Atmosphere` 2 | //! 3 | //! **A lightweight sql framework for sustainable database reliant systems** 4 | //! 5 | //! ## Overview 6 | //! 7 | //! Atmosphere is a lightweight SQL framework designed for sustainable, database-reliant systems. 8 | //! It leverages Rust's powerful type and macro systems to derive SQL schemas from your rust struct 9 | //! definitions into an advanced trait system. 10 | //! 11 | //! Atmosphere provides a suite of modules and types that abstract and facilitate various aspects 12 | //! of database operations, from query construction and execution to error handling and schema 13 | //! management. 14 | //! 15 | //! ## Key Features 16 | //! 17 | //! - SQL schema derivation from Rust structs. 18 | //! - Advanced trait system for query generation. 19 | //! - Automated database code testing with `atmosphere::testing` 20 | //! - ORM-like CRUD traits. 21 | //! - Code reusability across API layers using generics. 22 | //! - Compile-time introspection for type-safe schema generation. 23 | 24 | #![cfg(any(feature = "postgres", feature = "mysql", feature = "sqlite"))] 25 | 26 | /// Facilitates binding entities to queries, ensuring type safety and ease of use in query construction. 27 | pub mod bind; 28 | /// Defines high-level database error types, offering a structured approach to error handling. 29 | pub mod error; 30 | /// Implements a hook system, allowing custom logic to be executed at different stages of database 31 | /// interactions. 32 | pub mod hooks; 33 | /// Offers an abstraction layer for building and executing SQL queries, simplifying complex query 34 | /// logic. 35 | pub mod query; 36 | /// Models SQL relationships, providing tools to define and manipulate relationships between 37 | /// database entities. 
38 | pub mod rel; 39 | /// Manages the runtime environment for database operations, encompassing execution contexts and 40 | /// configurations. 41 | pub mod runtime; 42 | /// Contains compile-time generated SQL schema traits, enabling a declarative approach to schema 43 | /// definition. 44 | pub mod schema; 45 | /// Provides utilities for automated testing of SQL interactions, ensuring reliability and 46 | /// correctness of database operations. 47 | pub mod testing; 48 | 49 | pub use driver::{Driver, Pool}; 50 | 51 | /// Driver System 52 | /// 53 | /// The default driver / feature `any` is activated by default. If a specific driver 54 | /// feature is enabled (`postgres`, `sqlite`, `mysql`) atmosphere will prefer this over 55 | /// the `sqlx::Any` driver. 56 | /// 57 | /// If your application makes use of more than one database at the same time, please use the any 58 | /// driver. 59 | pub mod driver { 60 | #[cfg(any( 61 | all(feature = "postgres", any(feature = "mysql", feature = "sqlite")), 62 | all(feature = "mysql", any(feature = "postgres", feature = "sqlite")), 63 | all(feature = "sqlite", any(feature = "postgres", feature = "mysql")), 64 | ))] 65 | compile_error!( 66 | "only one database driver can be set – please use multiple binaries using different atmosphere features if you need more than one database" 67 | ); 68 | 69 | #[cfg(all(feature = "postgres", not(any(feature = "mysql", feature = "sqlite"))))] 70 | /// Atmosphere Database Driver 71 | pub type Driver = sqlx::Postgres; 72 | 73 | #[cfg(all(feature = "postgres", not(any(feature = "mysql", feature = "sqlite"))))] 74 | /// Atmosphere Database Pool 75 | pub type Pool = sqlx::PgPool; 76 | 77 | #[cfg(all(feature = "mysql", not(any(feature = "postgres", feature = "sqlite"))))] 78 | /// Atmosphere Database Driver 79 | pub type Driver = sqlx::MySql; 80 | 81 | #[cfg(all(feature = "mysql", not(any(feature = "postgres", feature = "sqlite"))))] 82 | /// Atmosphere Database Pool 83 | pub type Pool = 
sqlx::MySqlPool; 84 | 85 | #[cfg(all(feature = "sqlite", not(any(feature = "postgres", feature = "mysql"))))] 86 | /// Atmosphere Database Driver 87 | pub type Driver = sqlx::Sqlite; 88 | 89 | #[cfg(all(feature = "sqlite", not(any(feature = "postgres", feature = "mysql"))))] 90 | /// Atmosphere Database Pool 91 | pub type Pool = sqlx::SqlitePool; 92 | } 93 | 94 | pub use bind::*; 95 | pub use error::*; 96 | pub use schema::*; 97 | 98 | #[doc(hidden)] 99 | pub use sqlx; 100 | -------------------------------------------------------------------------------- /atmosphere-core/src/schema/update.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | Bind, Error, Result, 3 | hooks::{self, HookInput, HookStage, Hooks}, 4 | query::{QueryError, QueryResult}, 5 | schema::Table, 6 | }; 7 | 8 | use async_trait::async_trait; 9 | use sqlx::{Database, Executor, IntoArguments}; 10 | 11 | /// Update rows in a database. 12 | /// 13 | /// Provides functionality for updating data in tables within a SQL database. This trait defines 14 | /// asynchronous methods for modifying existing rows in the database, either through direct updates 15 | /// or upserts (update or insert if not exists). It ensures that hooks are executed at various 16 | /// stages, enabling custom logic to be integrated into the update process. 17 | #[async_trait] 18 | pub trait Update: Table + Bind + Hooks + Send + Sync + Unpin + 'static { 19 | /// Updates an existing row in the database. This method constructs an update query, binds the 20 | /// necessary values, executes the query, and applies hooks at predefined stages (e.g., before 21 | /// binding, before execution, after execution). 
22 | async fn update<'e, E>( 23 | &mut self, 24 | executor: E, 25 | ) -> Result<<crate::Driver as Database>::QueryResult> 26 | where 27 | E: Executor<'e, Database = crate::Driver>, 28 | for<'q> <crate::Driver as Database>::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send; 29 | 30 | /// Similar to `update`, but either updates an existing row or inserts a new one if it does not 31 | /// exist, depending on the primary key's presence and uniqueness. 32 | async fn upsert<'e, E>( 33 | &mut self, 34 | executor: E, 35 | ) -> Result<<crate::Driver as Database>::QueryResult> 36 | where 37 | E: Executor<'e, Database = crate::Driver>, 38 | for<'q> <crate::Driver as Database>::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send; 39 | } 40 | 41 | #[async_trait] 42 | impl<T> Update for T 43 | where 44 | T: Table + Bind + Hooks + Send + Sync + Unpin + 'static, 45 | { 46 | async fn update<'e, E>( 47 | &mut self, 48 | executor: E, 49 | ) -> Result<<crate::Driver as Database>::QueryResult> 50 | where 51 | E: Executor<'e, Database = crate::Driver>, 52 | for<'q> <crate::Driver as Database>::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 53 | { 54 | let query = crate::runtime::sql::update::<T>(); 55 | 56 | hooks::execute(HookStage::PreBind, &query, HookInput::Row(self)).await?; 57 | 58 | let mut sql = sqlx::query(query.sql()); 59 | 60 | for c in query.bindings().columns() { 61 | sql = self.bind(c, sql).unwrap(); 62 | } 63 | 64 | hooks::execute(HookStage::PreExec, &query, HookInput::None).await?; 65 | 66 | let res = sql 67 | .persistent(false) 68 | .execute(executor) 69 | .await 70 | .map_err(QueryError::from) 71 | .map_err(Error::Query); 72 | 73 | hooks::execute( 74 | hooks::HookStage::PostExec, 75 | &query, 76 | QueryResult::Execution(&res).into(), 77 | ) 78 | .await?; 79 | 80 | res 81 | } 82 | 83 | async fn upsert<'e, E>( 84 | &mut self, 85 | executor: E, 86 | ) -> Result<<crate::Driver as Database>::QueryResult> 87 | where 88 | E: Executor<'e, Database = crate::Driver>, 89 | for<'q> <crate::Driver as Database>::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 90 | { 91 | let query = crate::runtime::sql::upsert::<T>(); 92 | 93 | hooks::execute(HookStage::PreBind, &query, 
HookInput::Row(self)).await?; 94 | 95 | let mut sql = sqlx::query(query.sql()); 96 | 97 | for c in query.bindings().columns() { 98 | sql = self.bind(c, sql).unwrap(); 99 | } 100 | 101 | hooks::execute(HookStage::PreExec, &query, HookInput::None).await?; 102 | 103 | let res = sql 104 | .persistent(false) 105 | .execute(executor) 106 | .await 107 | .map_err(QueryError::from) 108 | .map_err(Error::Query); 109 | 110 | hooks::execute( 111 | hooks::HookStage::PostExec, 112 | &query, 113 | QueryResult::Execution(&res).into(), 114 | ) 115 | .await?; 116 | 117 | res 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /atmosphere-macros/src/derive/relationships.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::{Span, TokenStream}; 2 | use quote::quote; 3 | use syn::Ident; 4 | 5 | use crate::schema::table::Table; 6 | 7 | pub fn relationships(table: &Table) -> TokenStream { 8 | let mut stream = TokenStream::new(); 9 | 10 | let ident = &table.ident; 11 | 12 | for fk in table.foreign_keys.iter() { 13 | let col = fk.quote(); 14 | 15 | let other = &fk.on; 16 | 17 | let find_all_self = Ident::new( 18 | &format!("{}s", ident.to_string().to_lowercase()), 19 | Span::mixed_site(), 20 | ); 21 | 22 | let find_other = Ident::new( 23 | &fk.name.field().to_string().to_lowercase().to_string(), 24 | Span::mixed_site(), 25 | ); 26 | 27 | let find_by_other = Ident::new( 28 | &format!("find_by_{}", fk.name.field().to_string().to_lowercase()), 29 | Span::mixed_site(), 30 | ); 31 | 32 | let delete_self = Ident::new( 33 | &format!("delete_{}s", ident.to_string().to_lowercase()), 34 | Span::mixed_site(), 35 | ); 36 | 37 | stream.extend(quote!( 38 | #[automatically_derived] 39 | impl #ident { 40 | pub async fn #find_other<'e, E>( 41 | &self, 42 | executor: E, 43 | ) -> ::atmosphere::Result<#other> 44 | where 45 | E: ::atmosphere::sqlx::Executor<'e, Database = ::atmosphere::Driver>, 46 | for<'q> 
<::atmosphere::Driver as ::atmosphere::sqlx::database::Database>::Arguments<'q>: 47 | ::atmosphere::sqlx::IntoArguments<'q, ::atmosphere::Driver> + Send { 48 | <#ident as ::atmosphere::rel::RefersTo<#other>>::resolve(&self, executor).await 49 | } 50 | 51 | pub async fn #find_by_other<'e, E>( 52 | executor: E, 53 | pk: &<#other as ::atmosphere::Table>::PrimaryKey, 54 | // TODO: either Vec, or if marked as unique, only Self 55 | ) -> ::atmosphere::Result> 56 | where 57 | E: ::atmosphere::sqlx::Executor<'e, Database = ::atmosphere::Driver>, 58 | for<'q> <::atmosphere::Driver as ::atmosphere::sqlx::database::Database>::Arguments<'q>: 59 | ::atmosphere::sqlx::IntoArguments<'q, ::atmosphere::Driver> + Send { 60 | <#other as ::atmosphere::rel::ReferredBy<#ident>>::resolve_by(executor, pk).await 61 | } 62 | } 63 | 64 | #[automatically_derived] 65 | impl #other { 66 | pub async fn #find_all_self<'e, E>( 67 | &self, 68 | executor: E, 69 | ) -> ::atmosphere::Result> 70 | where 71 | E: ::atmosphere::sqlx::Executor<'e, Database = ::atmosphere::Driver>, 72 | for<'q> <::atmosphere::Driver as ::atmosphere::sqlx::database::Database>::Arguments<'q>: 73 | ::atmosphere::sqlx::IntoArguments<'q, ::atmosphere::Driver> + Send { 74 | <#other as ::atmosphere::rel::ReferredBy<#ident>>::resolve(&self, executor).await 75 | } 76 | 77 | pub async fn #delete_self<'e, E>( 78 | &self, 79 | executor: E, 80 | ) -> ::atmosphere::Result<<::atmosphere::Driver as ::atmosphere::sqlx::Database>::QueryResult> 81 | where 82 | E: ::atmosphere::sqlx::Executor<'e, Database = ::atmosphere::Driver>, 83 | for<'q> <::atmosphere::Driver as ::atmosphere::sqlx::database::Database>::Arguments<'q>: 84 | ::atmosphere::sqlx::IntoArguments<'q, ::atmosphere::Driver> + Send { 85 | <#other as ::atmosphere::rel::ReferredBy<#ident>>::delete_all(&self, executor).await 86 | } 87 | } 88 | 89 | #[automatically_derived] 90 | impl ::atmosphere::rel::RefersTo<#other> for #ident { 91 | const FOREIGN_KEY: 
::atmosphere::ForeignKey<#ident> = #col; 92 | } 93 | 94 | #[automatically_derived] 95 | impl ::atmosphere::rel::ReferredBy<#ident> for #other {} 96 | )); 97 | } 98 | 99 | stream 100 | } 101 | -------------------------------------------------------------------------------- /atmosphere-macros/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # Macros for Atmosphere 2 | //! 3 | //! This crate provides a set of procedural macros to simplify and automate various tasks in 4 | //! atmosphere. These macros enhance the developer experience by reducing boilerplate, 5 | //! ensuring consistency, and integrating seamlessly with the framework's functionalities. 6 | //! 7 | //! This crate includes macros for deriving schema information from structs, handling table-related 8 | //! attributes, and managing hooks within the framework. The macros are designed to be intuitive 9 | //! and align with the framework's conventions, making them a powerful tool in the application 10 | //! development process. 11 | 12 | #![cfg(any(feature = "postgres", feature = "mysql", feature = "sqlite"))] 13 | 14 | use proc_macro::TokenStream; 15 | use quote::{ToTokens, quote}; 16 | use syn::{ItemStruct, parse_macro_input}; 17 | 18 | mod derive; 19 | mod hooks; 20 | mod schema; 21 | 22 | use schema::table::Table; 23 | 24 | /// An attribute macro that stores metadata about the sql table and derives needed traits. 25 | /// 26 | /// Keys: 27 | /// 28 | /// - `schema` - sets schema name. 29 | /// - `name` - sets table name. 
30 | /// 31 | /// Usage: 32 | /// 33 | /// ```ignore 34 | /// # use atmosphere::prelude::*; 35 | /// #[table(schema = "public", name = "user")] 36 | /// # struct User { 37 | /// # #[sql(pk)] 38 | /// # id: i32, 39 | /// # #[sql(unique)] 40 | /// # username: String, 41 | /// # } 42 | /// ``` 43 | #[proc_macro_attribute] 44 | pub fn table(table_args: TokenStream, input: TokenStream) -> TokenStream { 45 | let mut model = parse_macro_input!(input as ItemStruct); 46 | 47 | for ref mut field in model.fields.iter_mut() { 48 | let attribute = field 49 | .attrs 50 | .iter() 51 | .find(|a| a.path().is_ident(schema::column::attribute::PATH)); 52 | 53 | let Some(attribute) = attribute else { 54 | continue; 55 | }; 56 | 57 | let attribute: schema::column::attribute::Attribute = attribute.parse_args().unwrap(); 58 | 59 | if let Some(rename) = attribute.renamed { 60 | struct Extract { 61 | rename: syn::Attribute, 62 | } 63 | 64 | impl syn::parse::Parse for Extract { 65 | fn parse(input: syn::parse::ParseStream) -> syn::Result { 66 | Ok(Self { 67 | rename: input 68 | .call(syn::Attribute::parse_outer)? 69 | .into_iter() 70 | .next() 71 | .unwrap(), 72 | }) 73 | } 74 | } 75 | 76 | let Extract { rename } = 77 | syn::parse_str(&format!("#[sqlx(rename = \"{rename}\")]")).unwrap(); 78 | 79 | field.attrs.push(rename); 80 | } 81 | } 82 | 83 | let table = match Table::parse_struct(&model, table_args) { 84 | Ok(table) => table, 85 | Err(error) => return error.into_compile_error().into(), 86 | }; 87 | 88 | for field in model.fields.iter_mut() { 89 | field.attrs.retain(|attr| !attr.path().is_ident("sql")); 90 | } 91 | 92 | let model = model.to_token_stream(); 93 | let derives = derive::all(&table); 94 | 95 | quote! { 96 | #[derive(::atmosphere::sqlx::FromRow)] 97 | #model 98 | 99 | #derives 100 | } 101 | .into() 102 | } 103 | 104 | /// An attribute macro for registering on a table. Must be used with `#[table]` macro. 
105 | /// 106 | /// Takes as argument a type which implements `Hook` for the entity type. 107 | /// 108 | /// Usage: 109 | /// 110 | /// ```ignore 111 | /// # use atmosphere::prelude::*; 112 | /// # use atmosphere::hooks::*; 113 | /// #[table(schema = "public", name = "user")] 114 | /// #[hooks(MyHook)] 115 | /// struct User { 116 | /// #[sql(pk)] 117 | /// id: i32, 118 | /// #[sql(unique)] 119 | /// username: String, 120 | /// } 121 | /// 122 | /// struct MyHook; 123 | /// 124 | /// impl Hook for MyHook { 125 | /// fn stage(&self) -> HookStage { 126 | /// todo!() 127 | /// } 128 | /// } 129 | /// ``` 130 | #[proc_macro_attribute] 131 | pub fn hooks(attr: TokenStream, input: TokenStream) -> TokenStream { 132 | let model = parse_macro_input!(input as ItemStruct); 133 | let _ = parse_macro_input!(attr as hooks::Hooks); 134 | quote! { #model }.into() 135 | } 136 | -------------------------------------------------------------------------------- /atmosphere-core/src/schema/delete.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | Bind, Error, Result, 3 | hooks::{self, Hooks}, 4 | query::{QueryError, QueryResult}, 5 | schema::Table, 6 | }; 7 | 8 | use async_trait::async_trait; 9 | use sqlx::{Database, Executor, IntoArguments}; 10 | 11 | /// Trait for deleting rows from a database. 12 | /// 13 | /// Provides functionality for deleting rows from a table in the database. Implementors of this 14 | /// trait can delete entities either by their instance or by their primary key. The trait ensures 15 | /// proper execution of hooks at various stages of the delete operation, enhancing flexibility and 16 | /// allowing for custom behavior during the deletion process. 17 | #[async_trait] 18 | pub trait Delete: Table + Bind + Hooks + Send + Sync + Unpin + 'static { 19 | /// Deletes the row represented by the instance from the database. 
Builds and executes a delete 20 | /// query and triggers hooks at appropriate stages (e.g., before binding, before execution, 21 | /// after execution). 22 | async fn delete<'e, E>( 23 | &mut self, 24 | executor: E, 25 | ) -> Result<::QueryResult> 26 | where 27 | E: Executor<'e, Database = crate::Driver>, 28 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send; 29 | 30 | /// Deletes a row from the database based on its primary key. This method is particularly 31 | /// useful for deleting entities when only the primary key is available. 32 | async fn delete_by<'e, E>( 33 | executor: E, 34 | pk: &Self::PrimaryKey, 35 | ) -> Result<::QueryResult> 36 | where 37 | E: Executor<'e, Database = crate::Driver>, 38 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send; 39 | } 40 | 41 | #[async_trait] 42 | impl Delete for T 43 | where 44 | T: Table + Bind + Hooks + Send + Sync + Unpin + 'static, 45 | { 46 | async fn delete<'e, E>( 47 | &mut self, 48 | executor: E, 49 | ) -> Result<::QueryResult> 50 | where 51 | E: Executor<'e, Database = crate::Driver>, 52 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 53 | { 54 | let query = crate::runtime::sql::delete::(); 55 | 56 | hooks::execute( 57 | hooks::HookStage::PreBind, 58 | &query, 59 | hooks::HookInput::Row(self), 60 | ) 61 | .await?; 62 | 63 | let mut sql = sqlx::query(query.sql()); 64 | 65 | for c in query.bindings.columns() { 66 | sql = self.bind(c, sql).unwrap(); 67 | } 68 | 69 | hooks::execute(hooks::HookStage::PreExec, &query, hooks::HookInput::None).await?; 70 | 71 | let res = sql 72 | .persistent(false) 73 | .execute(executor) 74 | .await 75 | .map_err(QueryError::from) 76 | .map_err(Error::Query); 77 | 78 | hooks::execute( 79 | hooks::HookStage::PostExec, 80 | &query, 81 | QueryResult::Execution(&res).into(), 82 | ) 83 | .await?; 84 | 85 | res 86 | } 87 | 88 | async fn delete_by<'e, E>( 89 | executor: E, 90 | pk: &Self::PrimaryKey, 91 | ) -> Result<::QueryResult> 92 | where 
93 | E: Executor<'e, Database = crate::Driver>, 94 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 95 | { 96 | let query = crate::runtime::sql::delete::(); 97 | 98 | hooks::execute( 99 | hooks::HookStage::PreBind, 100 | &query, 101 | hooks::HookInput::PrimaryKey(pk), 102 | ) 103 | .await?; 104 | 105 | assert!(query.bindings().columns().len() == 1); 106 | assert!(query.bindings().columns()[0].field() == Self::PRIMARY_KEY.field); 107 | assert!(query.bindings().columns()[0].sql() == Self::PRIMARY_KEY.sql); 108 | 109 | hooks::execute(hooks::HookStage::PreExec, &query, hooks::HookInput::None).await?; 110 | 111 | let res = sqlx::query(query.sql()) 112 | .bind(pk) 113 | .persistent(false) 114 | .execute(executor) 115 | .await 116 | .map_err(QueryError::from) 117 | .map_err(Error::Query); 118 | 119 | hooks::execute( 120 | hooks::HookStage::PostExec, 121 | &query, 122 | QueryResult::Execution(&res).into(), 123 | ) 124 | .await?; 125 | 126 | res 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /atmosphere-core/src/rel.rs: -------------------------------------------------------------------------------- 1 | //! Provides traits for managing relationships between database entities. 2 | //! 3 | //! This module contains traits and their implementations for handling relationships such as 4 | //! 'RefersTo' and 'ReferredBy'. These traits facilitate operations like resolving and deleting 5 | //! relationships in a database using SQLx. 6 | 7 | use async_trait::async_trait; 8 | use sqlx::database::Database; 9 | use sqlx::{Executor, IntoArguments}; 10 | 11 | use crate::bind::Bind; 12 | use crate::query::{Query, QueryError}; 13 | use crate::runtime::sql; 14 | use crate::schema::Table; 15 | use crate::{Error, ForeignKey, Result}; 16 | 17 | /// Defines a relationship where `Self` refers to `Other`. 
18 | /// 19 | /// Implements functionality to resolve this relationship, fetching the `Other` entity that `Self` 20 | /// refers to. 21 | #[async_trait] 22 | pub trait RefersTo 23 | where 24 | Self: Table + Bind, 25 | Other: Table + Bind + Unpin + Sync, 26 | { 27 | const FOREIGN_KEY: ForeignKey; 28 | 29 | /// Asynchronously resolves and retrieves the `Other` entity that `Self` refers to from the 30 | /// database. 31 | async fn resolve<'e, E>(&self, executor: E) -> Result 32 | where 33 | E: Executor<'e, Database = crate::Driver>, 34 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 35 | { 36 | let Query { builder, .. } = sql::select::(); 37 | 38 | let mut query = sqlx::query_as(builder.sql()); 39 | 40 | let fk = Self::FOREIGN_KEY.as_col(); 41 | query = self.bind(&fk, query).unwrap(); 42 | 43 | query 44 | .persistent(false) 45 | .fetch_one(executor) 46 | .await 47 | .map_err(QueryError::from) 48 | .map_err(Error::Query) 49 | } 50 | } 51 | 52 | /// Defines a relationship where `Self` is referred to by many `Other`. 53 | /// 54 | /// This trait provides methods to resolve these relationships, including fetching all `Other` 55 | /// entities referring to `Self`, resolving by primary key, and deleting all such referring 56 | /// entities. 57 | #[async_trait] 58 | pub trait ReferredBy 59 | where 60 | Self: Table + Bind + Unpin + Sync, 61 | Other: Table + Bind + RefersTo + Unpin + Sync, 62 | { 63 | /// Asynchronously fetches all `Other` entities referring to `Self`. 64 | async fn resolve<'e, E>(&self, executor: E) -> Result> 65 | where 66 | E: Executor<'e, Database = crate::Driver>, 67 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 68 | { 69 | let Query { builder, .. 
} = sql::select_by::(Other::FOREIGN_KEY.as_col()); 70 | 71 | let mut query = sqlx::query_as(builder.sql()); 72 | 73 | let pk = Self::PRIMARY_KEY.as_col(); 74 | query = self.bind(&pk, query).unwrap(); 75 | 76 | query 77 | .persistent(false) 78 | .fetch_all(executor) 79 | .await 80 | .map_err(QueryError::from) 81 | .map_err(Error::Query) 82 | } 83 | 84 | /// Resolves the referring entities based on the primary key of `Self`. 85 | async fn resolve_by<'e, E>(executor: E, pk: &Self::PrimaryKey) -> Result> 86 | where 87 | E: Executor<'e, Database = crate::Driver>, 88 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 89 | { 90 | let Query { builder, .. } = sql::select_by::(Other::FOREIGN_KEY.as_col()); 91 | 92 | sqlx::query_as(builder.sql()) 93 | .bind(pk) 94 | .persistent(false) 95 | .fetch_all(executor) 96 | .await 97 | .map_err(QueryError::from) 98 | .map_err(Error::Query) 99 | } 100 | 101 | /// Deletes all `Other` entities referring to `Self`. 102 | async fn delete_all<'e, E>( 103 | &self, 104 | executor: E, 105 | ) -> Result<::QueryResult> 106 | where 107 | E: Executor<'e, Database = crate::Driver>, 108 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 109 | { 110 | let Query { builder, .. 
} = sql::delete_by::(Other::FOREIGN_KEY.as_col()); 111 | 112 | let mut query = sqlx::query(builder.sql()); 113 | 114 | let pk = Self::PRIMARY_KEY.as_col(); 115 | query = self.bind(&pk, query).unwrap(); 116 | 117 | query 118 | .persistent(false) 119 | .execute(executor) 120 | .await 121 | .map_err(QueryError::from) 122 | .map_err(Error::Query) 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /atmosphere-macros/src/schema/table.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | 3 | use syn::parse::{Parse, ParseStream}; 4 | use syn::spanned::Spanned as _; 5 | use syn::{Error, Fields, Ident, LitStr, Token}; 6 | 7 | use crate::hooks::Hooks; 8 | use crate::schema::column::{Column, DataColumn, TimestampColumn}; 9 | use crate::schema::keys::{ForeignKey, PrimaryKey}; 10 | 11 | #[derive(Clone, Debug)] 12 | pub struct TableId { 13 | pub schema: String, 14 | pub table: String, 15 | } 16 | 17 | impl Parse for TableId { 18 | fn parse(input: ParseStream) -> syn::Result { 19 | let mut schema = None; 20 | let mut table = None; 21 | 22 | while !input.is_empty() { 23 | let ident: syn::Ident = input.parse()?; 24 | input.parse::()?; 25 | let value: LitStr = input.parse()?; 26 | 27 | match ident.to_string().as_str() { 28 | "schema" => schema = Some(value.value()), 29 | "name" => table = Some(value.value()), 30 | _ => { 31 | return Err(syn::Error::new_spanned( 32 | ident, 33 | "`#[table]` supports only the values `schema` and `name`", 34 | )); 35 | } 36 | } 37 | 38 | if !input.peek(Token![,]) { 39 | break; 40 | } 41 | 42 | input.parse::()?; 43 | } 44 | 45 | let schema = schema.ok_or_else(|| { 46 | syn::Error::new(input.span(), "`#[table]` requires a value for `schema`") 47 | })?; 48 | 49 | let table = table.ok_or_else(|| { 50 | syn::Error::new(input.span(), "`#[table]` requires a value for `name`") 51 | })?; 52 | 53 | Ok(Self { schema, table }) 54 | } 55 | } 56 | 
/// Parsed schema of a struct annotated with `#[table]`: the struct identifier,
/// the schema/table id, its key and column classification, and configured hooks.
#[derive(Clone, Debug)]
pub struct Table {
    pub ident: Ident,

    pub id: TableId,

    pub primary_key: PrimaryKey,
    pub foreign_keys: HashSet<ForeignKey>,
    pub data_columns: HashSet<DataColumn>,
    pub timestamp_columns: HashSet<TimestampColumn>,

    pub hooks: Hooks,
}

impl Table {
    /// Parses a struct item (together with the `#[table(...)]` argument tokens)
    /// into a [`Table`].
    ///
    /// Fails when the struct does not use named fields or does not declare
    /// exactly one primary key via `#[sql(pk)]`.
    pub fn parse_struct(
        item: &syn::ItemStruct,
        table_args: proc_macro::TokenStream,
    ) -> syn::Result<Self> {
        let id: TableId = syn::parse(table_args)?;

        // `#[hooks(...)]` is optional – fall back to the default (empty) set.
        let hooks: Hooks = {
            let attr = item.attrs.iter().find(|attr| attr.path().is_ident("hooks"));

            if let Some(attr) = attr {
                attr.parse_args()?
            } else {
                Hooks::default()
            }
        };

        let ident = &item.ident;

        let fields = match &item.fields {
            Fields::Named(n) => n,
            Fields::Unnamed(_) | Fields::Unit => {
                return Err(Error::new(
                    ident.span(),
                    format!("{ident} must use named fields in order to be used with `table`"),
                ));
            }
        };

        let columns = fields
            .clone()
            .named
            .into_iter()
            .map(Column::try_from)
            .collect::<syn::Result<Vec<_>>>()?;

        let primary_key = {
            let primary_keys: HashSet<PrimaryKey> = columns
                .iter()
                .filter_map(|c| c.as_primary_key())
                .cloned()
                .collect();

            if primary_keys.len() > 1 {
                return Err(Error::new(
                    item.span(),
                    format!(
                        "{ident} declares more than one column as its primary key – only one is allowed"
                    ),
                ));
            }

            // `ok_or_else` so the error (and its `format!`) is only built on the failure path.
            primary_keys.into_iter().next().ok_or_else(|| {
                Error::new(
                    item.span(),
                    format!("{ident} must declare one field as its primary key (using `#[sql(pk)]`)"),
                )
            })?
127 | }; 128 | 129 | let foreign_keys = columns 130 | .iter() 131 | .filter_map(|c| c.as_foreign_key()) 132 | .cloned() 133 | .collect(); 134 | 135 | let data_columns = columns 136 | .iter() 137 | .filter_map(|c| c.as_data_column()) 138 | .cloned() 139 | .collect(); 140 | 141 | let timestamp_columns = columns 142 | .iter() 143 | .filter_map(|c| c.as_timestamp_column()) 144 | .cloned() 145 | .collect(); 146 | 147 | Ok(Self { 148 | ident: ident.clone(), 149 | id, 150 | primary_key, 151 | foreign_keys, 152 | data_columns, 153 | timestamp_columns, 154 | hooks, 155 | }) 156 | } 157 | } 158 | -------------------------------------------------------------------------------- /atmosphere-core/src/testing.rs: -------------------------------------------------------------------------------- 1 | //! Provides functions for automated database testing. 2 | //! 3 | //! This module contains asynchronous functions to test the basic CRUD (Create, Read, Update, Delete) 4 | //! operations on database entities. It ensures that these operations are executed correctly and that 5 | //! the data integrity is maintained throughout the process. 6 | 7 | use crate::Entity; 8 | use std::fmt::Debug; 9 | 10 | /// Tests entity creation in the database. 11 | /// 12 | /// Verifies that an entity can be created and retrieved correctly. It asserts the non-existence of 13 | /// the entity before creation and checks for equality between the created and retrieved instances. 
14 | pub async fn create(pool: &crate::Pool, mut instance: E) 15 | where 16 | E: Entity + Clone + Debug + Eq + Send, 17 | { 18 | assert!( 19 | E::read(pool, instance.pk()).await.is_err(), 20 | "instance was found (read) before it was created" 21 | ); 22 | 23 | assert!( 24 | E::find(pool, instance.pk()).await.unwrap().is_none(), 25 | "instance was found (find) before it was created" 26 | ); 27 | 28 | instance.create(pool).await.expect("insertion did not work"); 29 | 30 | let retrieved = E::read(pool, instance.pk()) 31 | .await 32 | .expect("instance not found after insertion"); 33 | 34 | assert_eq!(instance, retrieved); 35 | } 36 | 37 | /// Tests reading of an entity from the database. 38 | /// 39 | /// Validates that an entity, once created, can be correctly read from the database. It ensures 40 | /// that the entity does not exist prior to creation and that the retrieved instance matches the 41 | /// created one. 42 | pub async fn read(pool: &crate::Pool, mut instance: E) 43 | where 44 | E: Entity + Clone + Debug + Eq + Send, 45 | { 46 | assert!( 47 | E::read(pool, instance.pk()).await.is_err(), 48 | "instance was found (read) after deletion" 49 | ); 50 | 51 | assert!( 52 | E::find(pool, instance.pk()).await.unwrap().is_none(), 53 | "instance was found (find) after deletion" 54 | ); 55 | 56 | assert!( 57 | E::read_all(pool).await.unwrap().is_empty(), 58 | "there was an instance found in the database before creating" 59 | ); 60 | 61 | instance.create(pool).await.expect("insertion did not work"); 62 | 63 | let retrieved = E::read(pool, instance.pk()) 64 | .await 65 | .expect("instance not found after insertion"); 66 | 67 | assert_eq!(instance, retrieved); 68 | 69 | assert_eq!(E::read_all(pool).await.unwrap(), vec![instance.clone()]); 70 | } 71 | 72 | /// Tests updating of an entity in the database. 73 | /// 74 | /// Checks that an entity can be updated and the changes are correctly reflected. 
Each update is 75 | /// verified by reloading and comparing it with the original instance. 76 | pub async fn update(pool: &crate::Pool, mut instance: E, updates: Vec) 77 | where 78 | E: Entity + Clone + Debug + Eq + Send, 79 | { 80 | instance.upsert(pool).await.expect("insertion did not work"); 81 | 82 | for mut update in updates { 83 | update 84 | .update(pool) 85 | .await 86 | .expect("updating the instance did not work"); 87 | 88 | instance 89 | .reload(pool) 90 | .await 91 | .expect("reloading the instance did not work"); 92 | 93 | assert_eq!(instance, update); 94 | 95 | let retrieved = E::read(pool, instance.pk()) 96 | .await 97 | .expect("instance not found after update"); 98 | 99 | assert_eq!(instance, retrieved); 100 | 101 | let retrieved = E::find(pool, instance.pk()) 102 | .await 103 | .unwrap() 104 | .expect("instance not found (find) after update"); 105 | 106 | assert_eq!(instance, retrieved); 107 | } 108 | } 109 | 110 | /// Tests deletion of an entity from the database. 111 | /// 112 | /// Ensures that an entity can be deleted and is no longer retrievable post-deletion. It also 113 | /// confirms the non-existence of the entity after a delete operation. 
114 | pub async fn delete(pool: &crate::Pool, mut instance: E) 115 | where 116 | E: Entity + Clone + Debug + Eq + Send, 117 | { 118 | instance.create(pool).await.expect("insertion did not work"); 119 | 120 | instance.delete(pool).await.expect("deletion did not work"); 121 | 122 | instance 123 | .reload(pool) 124 | .await 125 | .expect_err("instance could be reloaded from db after deletion"); 126 | 127 | assert!( 128 | E::read(pool, instance.pk()).await.is_err(), 129 | "instance was found (read) after deletion" 130 | ); 131 | 132 | assert!( 133 | E::find(pool, instance.pk()).await.unwrap().is_none(), 134 | "instance was found (find) after deletion" 135 | ); 136 | 137 | instance.create(pool).await.expect("insertion did not work"); 138 | 139 | E::delete_by(pool, instance.pk()) 140 | .await 141 | .expect("deletion did not work"); 142 | 143 | instance 144 | .reload(pool) 145 | .await 146 | .expect_err("instance could be reloaded from db after deletion"); 147 | } 148 | 149 | // TODO: provide helpers to autogenerate uuids, pks, strings, emails, etc – maybe reexport another 150 | // crate? 151 | -------------------------------------------------------------------------------- /atmosphere-core/src/query.rs: -------------------------------------------------------------------------------- 1 | //! Provides structures and enums for handling and executing SQL queries, along with error 2 | //! handling. 3 | //! 4 | //! This module includes custom error types for different database-related errors, enums for query 5 | //! operations and cardinality, and a struct for building and managing queries for database tables. 6 | 7 | use miette::Diagnostic; 8 | use sqlx::QueryBuilder; 9 | use thiserror::Error; 10 | 11 | use crate::{Bind, Result, Table, runtime::sql::Bindings}; 12 | 13 | /// Errors that can occur while executing a database query. 
14 | /// 15 | /// This enum includes variants for IO errors, row not found errors, SQLSTATE errors, violation 16 | /// errors, and others, allowing for detailed categorization and handling of different database 17 | /// errors. 18 | #[derive(Debug, Diagnostic, Error)] 19 | #[non_exhaustive] 20 | pub enum QueryError { 21 | /// Database communication (IO / Protocol / TLS) related errors 22 | #[error("IO")] 23 | #[diagnostic(code(atmosphere::query::io))] 24 | Io(#[source] sqlx::Error), 25 | 26 | /// Row not found errors 27 | #[error("not found")] 28 | #[diagnostic(code(atmosphere::query::not_found))] 29 | NotFound(#[source] sqlx::Error), 30 | 31 | /// SQLSTATE errors 32 | #[error("sql")] 33 | #[diagnostic(transparent)] 34 | Sql(#[source] SqlError), 35 | 36 | /// Violation errors 37 | #[error("violation")] 38 | #[diagnostic(transparent)] 39 | Violation(#[source] ViolationError), 40 | 41 | /// Catch-all for sqlx errors 42 | #[error("sqlx")] 43 | #[diagnostic(code(atmosphere::query::sqlx))] 44 | Other(#[source] sqlx::Error), 45 | 46 | /// Atmosphere internal error 47 | #[error("internal error")] 48 | #[diagnostic(code(atmosphere::query::internal))] 49 | InternalError(#[source] sqlx::Error), 50 | } 51 | 52 | /// Represents errors related to constraint violations in the database. 53 | /// 54 | /// Includes uniqueness violations, foreign key violations, and integrity check errors, 55 | /// encapsulating different types of constraint-related issues that can occur during database 56 | /// operations. 
57 | #[derive(Debug, Diagnostic, Error)] 58 | #[non_exhaustive] 59 | pub enum ViolationError { 60 | /// Row uniqueness violated 61 | #[error("uniqueness violation")] 62 | #[diagnostic(code(atmosphere::violation::uniqueness))] 63 | Unique(#[source] sqlx::Error), 64 | 65 | /// Foreign key violation 66 | #[error("foreign key violation")] 67 | #[diagnostic(code(atmosphere::violation::foreign_key))] 68 | ForeignKey(#[source] sqlx::Error), 69 | 70 | /// Integritry check failed 71 | #[error("integrity check")] 72 | #[diagnostic(code(atmosphere::violation::integrity))] 73 | Check(#[source] sqlx::Error), 74 | } 75 | 76 | /// Encapsulates errors derived from SQLSTATE codes. 77 | /// 78 | /// This enum categorizes various SQL errors such as data exceptions, integrity constraints, syntax 79 | /// errors, and others, based on their SQLSTATE classification. 80 | #[derive(Debug, Diagnostic, Error)] 81 | #[non_exhaustive] 82 | pub enum SqlError { 83 | /// SQLSTATE Class 22 84 | #[error("data exception")] 85 | #[diagnostic(code(atmosphere::sqlstate::data))] 86 | DataException(#[source] sqlx::Error), 87 | 88 | /// SQLSTATE Class 23 89 | #[error("integrity constraint")] 90 | #[diagnostic(code(atmosphere::sqlstate::integrity))] 91 | IntegrityConstraint(#[source] sqlx::Error), 92 | 93 | /// SQLSTATE Class 42 94 | #[error("syntax")] 95 | #[diagnostic(code(atmosphere::sqlstate::syntax))] 96 | Syntax(#[source] sqlx::Error), 97 | 98 | /// All other classes 99 | #[error("other")] 100 | #[diagnostic(code(atmosphere::sqlstate::other))] 101 | Other(#[source] sqlx::Error), 102 | } 103 | 104 | impl From for QueryError { 105 | fn from(err: sqlx::Error) -> Self { 106 | use sqlx::Error as E; 107 | 108 | match err { 109 | E::RowNotFound => Self::NotFound(err), 110 | E::Io(_) 111 | | E::Protocol(_) 112 | | E::Tls(_) 113 | | E::Configuration(_) 114 | | E::PoolTimedOut 115 | | E::PoolClosed 116 | | E::WorkerCrashed => Self::Io(err), 117 | E::Database(ref e) => { 118 | if e.is_unique_violation() { 119 | 
return Self::Violation(ViolationError::Unique(err)); 120 | } 121 | 122 | if e.is_foreign_key_violation() { 123 | return Self::Violation(ViolationError::ForeignKey(err)); 124 | } 125 | 126 | if e.is_check_violation() { 127 | return Self::Violation(ViolationError::Check(err)); 128 | } 129 | 130 | // SQLSTATE code handling 131 | // See https://en.wikipedia.org/wiki/SQLSTATE for reference 132 | 133 | if let Some(c) = e.code() { 134 | if c.len() < 5 { 135 | return Self::InternalError(err); 136 | } 137 | 138 | return match &c.as_ref()[0..1] { 139 | "22" => Self::Sql(SqlError::DataException(err)), 140 | "23" => Self::Sql(SqlError::IntegrityConstraint(err)), 141 | "42" => Self::Sql(SqlError::Syntax(err)), 142 | _ => Self::Sql(SqlError::Other(err)), 143 | }; 144 | } 145 | 146 | Self::Other(err) 147 | } 148 | _ => Self::Other(err), 149 | } 150 | } 151 | } 152 | 153 | /// Describes the cardinality of the rows affected by a query. 154 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] 155 | pub enum Cardinality { 156 | None, 157 | One, 158 | Many, 159 | } 160 | 161 | /// Describes the types of operations that a query performs. 162 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] 163 | pub enum Operation { 164 | Select, 165 | Insert, 166 | Update, 167 | Upsert, 168 | Delete, 169 | Other, 170 | } 171 | 172 | /// Represents a atmosphere query over a database table. 
173 | pub struct Query { 174 | pub op: Operation, 175 | pub cardinality: Cardinality, 176 | pub(crate) builder: QueryBuilder<'static, crate::Driver>, 177 | pub(crate) bindings: Bindings, 178 | } 179 | 180 | impl Query { 181 | pub(crate) fn new( 182 | op: Operation, 183 | cardinality: Cardinality, 184 | builder: QueryBuilder<'static, crate::Driver>, 185 | bindings: Bindings, 186 | ) -> Self { 187 | Self { 188 | op, 189 | cardinality, 190 | builder, 191 | bindings, 192 | } 193 | } 194 | 195 | /// Access the generated sql 196 | pub fn sql(&self) -> &str { 197 | self.builder.sql() 198 | } 199 | 200 | /// Access the column bindings 201 | pub const fn bindings(&self) -> &Bindings { 202 | &self.bindings 203 | } 204 | } 205 | 206 | /// Describes possible results of executing a query. 207 | pub enum QueryResult<'t, T: Table + Bind> { 208 | Execution(&'t Result<::QueryResult>), 209 | Optional(&'t Result>), 210 | One(&'t Result), 211 | Many(&'t Result>), 212 | } 213 | -------------------------------------------------------------------------------- /atmosphere-core/src/schema/read.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | Bind, Error, Result, 3 | hooks::{self, HookInput, HookStage, Hooks}, 4 | query::{QueryError, QueryResult}, 5 | schema::Table, 6 | }; 7 | 8 | use async_trait::async_trait; 9 | use sqlx::{Executor, IntoArguments, database::Database}; 10 | 11 | /// Trait for reading rows from a database. 12 | /// 13 | /// This trait provides the functionality for reading data from tables in a SQL database. It 14 | /// defines several asynchronous methods for retrieving rows either by their primary key, reloading 15 | /// existing entities, or fetching all rows in a table. The trait incorporates hooks at various 16 | /// stages, allowing for custom logic to be executed as part of the reading process. 
17 | #[async_trait] 18 | pub trait Read: Table + Bind + Hooks + Send + Sync + Unpin + 'static { 19 | /// Finds and retrieves a row by its primary key. This method constructs a query to fetch 20 | /// a single row based on the primary key, executes it, and returns the result, optionally 21 | /// triggering hooks before and after execution. 22 | async fn read<'e, E>(executor: E, pk: &Self::PrimaryKey) -> Result 23 | where 24 | E: Executor<'e, Database = crate::Driver>, 25 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send; 26 | 27 | /// Finds and retrieves a row by its primary key. This method constructs a query to fetch 28 | /// a single row based on the primary key, executes it, and returns the result, optionally 29 | /// triggering hooks before and after execution. 30 | async fn find<'e, E>(executor: E, pk: &Self::PrimaryKey) -> Result> 31 | where 32 | E: Executor<'e, Database = crate::Driver>, 33 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send; 34 | 35 | /// Retrieves all rows from the table. This method is useful for fetching the complete 36 | /// dataset of a table, executing a query to return all rows, and applying hooks as needed. 37 | async fn read_all<'e, E>(executor: E) -> Result> 38 | where 39 | E: Executor<'e, Database = crate::Driver>, 40 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send; 41 | 42 | /// Reloads the current entity from the database. This method is designed to update the entity 43 | /// instance with the latest data from the database, ensuring that it reflects the current 44 | /// state of the corresponding row. 
45 | async fn reload<'e, E>(&mut self, executor: E) -> Result<()> 46 | where 47 | E: Executor<'e, Database = crate::Driver>, 48 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send; 49 | } 50 | 51 | #[async_trait] 52 | impl Read for T 53 | where 54 | T: Table + Bind + Hooks + Send + Sync + Unpin + 'static, 55 | { 56 | async fn read<'e, E>(executor: E, pk: &Self::PrimaryKey) -> Result 57 | where 58 | E: Executor<'e, Database = crate::Driver>, 59 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 60 | { 61 | let query = crate::runtime::sql::select::(); 62 | 63 | hooks::execute(HookStage::PreBind, &query, HookInput::PrimaryKey(pk)).await?; 64 | 65 | assert!(query.bindings().columns().len() == 1); 66 | assert!(query.bindings().columns()[0].field() == Self::PRIMARY_KEY.field); 67 | assert!(query.bindings().columns()[0].sql() == Self::PRIMARY_KEY.sql); 68 | 69 | hooks::execute(HookStage::PreExec, &query, HookInput::None).await?; 70 | 71 | let res = sqlx::query_as(query.sql()) 72 | .bind(pk) 73 | .persistent(false) 74 | .fetch_one(executor) 75 | .await 76 | .map_err(QueryError::from) 77 | .map_err(Error::Query); 78 | 79 | hooks::execute( 80 | hooks::HookStage::PostExec, 81 | &query, 82 | QueryResult::One(&res).into(), 83 | ) 84 | .await?; 85 | 86 | res 87 | } 88 | 89 | async fn find<'e, E>(executor: E, pk: &Self::PrimaryKey) -> Result> 90 | where 91 | E: Executor<'e, Database = crate::Driver>, 92 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 93 | { 94 | let query = crate::runtime::sql::select::(); 95 | 96 | hooks::execute(HookStage::PreBind, &query, HookInput::PrimaryKey(pk)).await?; 97 | 98 | assert!(query.bindings().columns().len() == 1); 99 | assert!(query.bindings().columns()[0].field() == Self::PRIMARY_KEY.field); 100 | assert!(query.bindings().columns()[0].sql() == Self::PRIMARY_KEY.sql); 101 | 102 | hooks::execute(HookStage::PreExec, &query, HookInput::None).await?; 103 | 104 | let res = 
sqlx::query_as(query.sql()) 105 | .bind(pk) 106 | .persistent(false) 107 | .fetch_optional(executor) 108 | .await 109 | .map_err(QueryError::from) 110 | .map_err(Error::Query); 111 | 112 | hooks::execute( 113 | hooks::HookStage::PostExec, 114 | &query, 115 | QueryResult::Optional(&res).into(), 116 | ) 117 | .await?; 118 | 119 | res 120 | } 121 | 122 | async fn read_all<'e, E>(executor: E) -> Result> 123 | where 124 | E: Executor<'e, Database = crate::Driver>, 125 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 126 | { 127 | let query = crate::runtime::sql::select_all::(); 128 | 129 | hooks::execute(HookStage::PreBind, &query, HookInput::None).await?; 130 | hooks::execute(HookStage::PreExec, &query, HookInput::None).await?; 131 | 132 | let res = sqlx::query_as(query.sql()) 133 | .persistent(false) 134 | .fetch_all(executor) 135 | .await 136 | .map_err(QueryError::from) 137 | .map_err(Error::Query); 138 | 139 | hooks::execute( 140 | hooks::HookStage::PostExec, 141 | &query, 142 | QueryResult::Many(&res).into(), 143 | ) 144 | .await?; 145 | 146 | res 147 | } 148 | 149 | async fn reload<'e, E>(&mut self, executor: E) -> Result<()> 150 | where 151 | E: Executor<'e, Database = crate::Driver>, 152 | for<'q> ::Arguments<'q>: IntoArguments<'q, crate::Driver> + Send, 153 | { 154 | let query = crate::runtime::sql::select_by::(T::PRIMARY_KEY.as_col()); 155 | 156 | hooks::execute(HookStage::PreBind, &query, HookInput::Row(self)).await?; 157 | 158 | let mut sql = sqlx::query_as(query.sql()); 159 | 160 | for c in query.bindings().columns() { 161 | sql = self.bind(c, sql).unwrap(); 162 | } 163 | 164 | hooks::execute(HookStage::PreExec, &query, HookInput::None).await?; 165 | 166 | let res = sql 167 | .persistent(false) 168 | .fetch_one(executor) 169 | .await 170 | .map_err(QueryError::from) 171 | .map_err(Error::Query); 172 | 173 | hooks::execute( 174 | hooks::HookStage::PostExec, 175 | &query, 176 | QueryResult::One(&res).into(), 177 | ) 178 | .await?; 179 | 
180 | *self = res?; 181 | 182 | Ok(()) 183 | } 184 | } 185 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | ![Atmosphere](./docs/assets/banner.png) 4 | 5 | # `🌍 Atmosphere` 6 | 7 | **A lightweight sql framework for sustainable database reliant systems** 8 | 9 | [![SQLx](https://img.shields.io/badge/sqlx-framework-blueviolet.svg)](https://github.com/launchbadge/sqlx) 10 | [![Crate](https://img.shields.io/crates/v/atmosphere.svg)](https://crates.io/crates/atmosphere) 11 | [![Book](https://img.shields.io/badge/book-latest-0f5225.svg)](https://helsing-ai.github.io/atmosphere) 12 | [![Docs](https://img.shields.io/badge/docs-latest-153f66.svg)](https://docs.rs/atmosphere) 13 | 14 |
15 | 16 | ## Overview 17 | 18 | Atmosphere is a lightweight SQL framework designed for sustainable, 19 | database-reliant systems. It leverages Rust's powerful type and macro systems 20 | to derive SQL schemas from your rust struct definitions into an advanced trait 21 | system. 22 | 23 | ## Key Features 24 | 25 | - SQL schema derivation from Rust structs. 26 | - Advanced trait system for query generation. 27 | - Automated database code testing with `atmosphere::testing` 28 | - ORM-like CRUD traits. 29 | - Code reusability across API layers using generics. 30 | - Compile-time introspection for type-safe schema generation. 31 | 32 | ## Quickstart 33 | 34 | ```rust 35 | use atmosphere::prelude::*; 36 | 37 | #[table(schema = "public", name = "user")] 38 | struct User { 39 | #[sql(pk)] 40 | id: i32, 41 | name: String, 42 | #[sql(unique)] 43 | email: String, 44 | } 45 | 46 | #[table(schema = "public", name = "post")] 47 | struct Post { 48 | #[sql(pk)] 49 | id: i32, 50 | #[sql(fk -> User, rename = "author_id")] 51 | author: i32, 52 | #[sql(unique)] 53 | title: String, 54 | } 55 | 56 | #[tokio::main] 57 | async fn main() -> sqlx::Result<()> { 58 | let pool = atmosphere::Pool::connect(&std::env::var("DATABASE_URL").unwrap()).await?; 59 | 60 | // CRUD operations 61 | 62 | let user = User { id: 0, name: "demo".to_owned(), email: "some@email.com".to_owned(), }; 63 | 64 | user.save(&pool).await?; 65 | user.delete(&pool).await?; 66 | user.create(&pool).await?; 67 | 68 | // Field Queries 69 | 70 | assert_eq!( 71 | User::read(&pool, &0).await?, 72 | User::find_by_email(&pool, "some@email.com").await?.unwrap() 73 | ); 74 | 75 | // Relationships 76 | 77 | Post { id: 0, author: 0, title: "test".to_owned() } 78 | .save(&pool) 79 | .await?; 80 | 81 | Post::find_by_author(&pool, &0).await?; 82 | Post::delete_by_author(&pool, &0).await?; 83 | 84 | // Inter-Table Operations 85 | 86 | Post { id: 1, author: 0, title: "test1".to_owned() } 87 | .author(&pool).await?; 88 | 89 | 
user.posts(&pool).await?; 90 | user.delete_posts(&pool).await?; 91 | 92 | Ok(()) 93 | } 94 | ``` 95 | 96 | Atmosphere introspects the `User` and `Post` structs at compile time and 97 | generates `const` available type information about the schema into the `Table` 98 | trait. 99 | 100 | ## Roadmap 101 | 102 | ### Alpha Release 103 | - [x] Advanced SQL Trait System (`Table`, `Column`, `Relation` ..) 104 | - [x] SQL Field Attributes (`#[sql(pk)]`, `#[sql(fk -> Model)]` and so on) 105 | - [x] SQL Query Generation 106 | - [x] Automated Integration Testing 107 | - [x] Attribute Macro (`#[table]`) 108 | 109 | ### Beta Release 110 | - [x] Transaction Support 111 | - [x] Getting Database Agnostic 112 | - [x] Hook into query execution using `atmosphere::hooks` 113 | - [x] Errors using `miette` 114 | - [ ] Combined Primary and Foreign Keys 115 | 116 | ### Stable Release 117 | - [x] Postgres Composite Types 118 | - [x] Support Custom Types 119 | - [x] Runtime Inspection 120 | - [x] Provide Application Utils 121 | - [ ] Stabilize Traits 122 | - [ ] Stabilize Query Generation 123 | - [ ] Table Lenses (subsets / views) 124 | - [ ] `validator` support 125 | - [ ] Auto Timestamping 126 | 127 | ### Advanced 128 | - [ ] Virtual Columns using (`#[virtual = ""]`) 129 | - [ ] Soft Delete Support 130 | - [ ] Attribute Macro (`#[query]`) 131 | - [ ] Custom queries 132 | 133 | ### Longterm 134 | - [ ] Generate GraphQL + HTTP Servers? 
135 | - [ ] Generate Graphs 136 | 137 | 138 | ## Functionalities 139 | 140 | Given a `struct Model` that derives its atmosphere schema using `#[table]`: 141 | 142 | ```rust 143 | use atmosphere::prelude::*; 144 | 145 | #[table(schema = "public", name = "model")] 146 | struct Model { 147 | #[sql(pk)] 148 | id: i32, 149 | a: String, 150 | #[sql(unique)] 151 | b: String, 152 | } 153 | ``` 154 | 155 | Atmosphere is able to derive and generate the following queries: 156 | 157 | ### CRUD 158 | 159 | #### `atmosphere::Create` 160 | 161 | - `Model::create` 162 | 163 | #### `atmosphere::Read` 164 | 165 | - `Model::read`: read a `Model` by its primary key, returning a `Model`. 166 | - `Model::find`: find a `Model` by its primary key, returning an `Option`. 167 | - `Model::read_all`: read all `Model`s, returning a `Vec`. 168 | - `Model::reload` 169 | 170 | #### `atmosphere::Update` 171 | 172 | - `Model::update` 173 | - `Model::upsert` 174 | 175 | #### `atmosphere::Delete` 176 | 177 | - `Model::delete` 178 | - `Model::delete_by` 179 | 180 | ### Field Queries 181 | 182 | Each struct field that is marked with `#[sql(unique)]` becomes queryable. 183 | 184 | In the above example `b` was marked as unique so atmosphere implements: 185 | 186 | - `Model::find_by_b`: find a `Model` by its `b` field, returning an `Option`. 187 | - `Model::delete_by_b`: delete a `Model` by its `b` field. 
188 | 189 | ### Relationships & Inter-Table Queries 190 | 191 | Given that a model contains fields are marked as a foreign key / point to 192 | another `atmosphere::Table` atmosphere – for example: 193 | 194 | ```rust 195 | #[table(schema = "public", name = "submodel")] 196 | struct Submodel { 197 | #[sql(pk)] 198 | id: i32, 199 | #[sql(fk -> Model)] 200 | super: i32, 201 | } 202 | ``` 203 | 204 | Atmosphere is able to generate utility queries to move across `Table` boundaries: 205 | 206 | - `Model::submodels` 207 | - `Model::delete_submodels` 208 | - `Submodel::model` 209 | - `Submodel::find_by_model` 210 | - `Submodel::delete_by_model` 211 | 212 | > Note that the function names contain `model` and `submodel` – they are derived from 213 | > the respective struct names. 214 | 215 | ### Json support 216 | 217 | Several databases support a `JSON` (and often `JSONB`) type, for which `sqlx` has native support through `#[sqlx(json)]` and `#[sqlx(json(nullable))]`. 218 | 219 | Since `atmosphere` only needs to know whether the column is JSON and to stay forward-compatible with future changes to `sqlx`'s attribute, we use the following syntax: 220 | 221 | ```rust 222 | #[table(schema = "public", name = "submodel")] 223 | struct Submodel { 224 | #[sql(pk)] 225 | id: i32, 226 | #[sql(json)] 227 | #[sqlx(json)] 228 | data: HashMap, 229 | #[sql(json)] 230 | #[sqlx(json(nullable))] 231 | optional_data: Option>, 232 | } 233 | ``` 234 | 235 | You can also manually handle the JSON support using the following (it will mean accessing the inner type through `.as_ref()` or `.0`): 236 | 237 | ```rust 238 | use sqlx::types::Json; 239 | 240 | #[table(schema = "public", name = "submodel")] 241 | struct Submodel { 242 | #[sql(pk)] 243 | id: i32, 244 | data: Json>, 245 | optional_data: Option>>, 246 | } 247 | ``` 248 | 249 | ## Contribution 250 | 251 | We welcome contributions! Please see [our contribution guidelines](CONTRIBUTING.md) for more details. 
252 | 253 | ## License 254 | 255 | Atmosphere is licensed under Apache 2.0. 256 | -------------------------------------------------------------------------------- /atmosphere-extras/src/postgis/geometry.rs: -------------------------------------------------------------------------------- 1 | use sqlx::{Database, Decode, Encode, Postgres, Type}; 2 | 3 | /// Error related to decoding operations from Postgres via sqlx. 4 | #[derive(Debug, thiserror::Error)] 5 | pub enum DecodeErr { 6 | /// Indicates that we received a different geometry type from the one we expected. 7 | #[error("expected '{expected}', but instead got '{decoded:?}'")] 8 | WrongType { 9 | expected: &'static str, 10 | decoded: geo_types::Geometry, 11 | }, 12 | /// Indicates that we received a `NULL` value instead of a concrete geometry value. 13 | #[error("expected a non-NULL value, but got NULL instead")] 14 | UnexpectedNull, 15 | } 16 | 17 | pub mod point { 18 | use super::*; 19 | 20 | /// Wrapper type for PostGIS Point type, which can be used in a table. Provides encoding and 21 | /// decoding implementations. 
#[derive(Debug, Clone, Copy, PartialEq)]
    #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
    #[cfg_attr(feature = "serde", serde(transparent))]
    pub struct Point(pub(crate) geo_types::Point<f64>);

    impl Point {
        /// Creates a new point from raw `x`/`y` coordinates.
        pub fn new(x: f64, y: f64) -> Self {
            Self(geo_types::Point::new(x, y))
        }
    }

    impl From<geo_types::Point<f64>> for Point {
        fn from(value: geo_types::Point<f64>) -> Self {
            Self(value)
        }
    }

    impl Type<Postgres> for Point {
        fn type_info() -> <Postgres as Database>::TypeInfo {
            // PostGIS stores every geometry kind behind the `geometry` SQL type.
            sqlx::postgres::PgTypeInfo::with_name("geometry")
        }
    }

    impl<'r> Decode<'r, Postgres> for Point {
        fn decode(
            value: <Postgres as Database>::ValueRef<'r>,
        ) -> Result<Self, sqlx::error::BoxDynError> {
            // Decode the WKB payload into a generic geometry first, then narrow
            // it down to a point — anything else is reported as a decode error.
            let decoded = geozero::wkb::Decode::<geo_types::Geometry<f64>>::decode(value)?;

            match decoded.geometry {
                Some(geo_types::Geometry::Point(p)) => Ok(p.into()),
                Some(other) => Err(Box::new(DecodeErr::WrongType {
                    expected: "point",
                    decoded: other,
                })),
                None => Err(Box::new(DecodeErr::UnexpectedNull)),
            }
        }
    }

    impl<'q> Encode<'q, Postgres> for Point {
        fn encode_by_ref(
            &self,
            buf: &mut <Postgres as Database>::ArgumentBuffer<'q>,
        ) -> Result<sqlx::encode::IsNull, sqlx::error::BoxDynError> {
            let geometry = geo_types::Geometry::Point(self.0);
            geozero::wkb::Encode(geometry).encode(buf)
        }
    }

    #[cfg(test)]
    mod tests {
        use crate::postgis::Point;

        #[cfg(feature = "serde")]
        #[test]
        fn serialize_deserialize() {
            let point = Point::new(4., 2.);

            let serialized = serde_json::to_string(&point).unwrap();
            assert_eq!(serialized, r#"{"x":4.0,"y":2.0}"#);

            let deserialized = serde_json::from_str(&serialized).unwrap();
            assert_eq!(point, deserialized);
        }
    }
}

mod polygon {
    use sqlx::postgres::PgTypeInfo;

    use super::*;

    /// A wrapper for the PostGIS `Polygon` type, providing `Encode` and `Decode` implementations for
    /// database persistence.
    #[derive(Debug, Clone, PartialEq)]
    pub struct Polygon(pub(crate) geo_types::Polygon<f64>);

    impl From<geo_types::Polygon<f64>> for Polygon {
        fn from(value: geo_types::Polygon<f64>) -> Self {
            Self(value)
        }
    }

    impl FromIterator<super::Point> for Polygon {
        /// Builds a polygon whose exterior ring is the given sequence of
        /// points (no interior rings).
        fn from_iter<T: IntoIterator<Item = super::Point>>(iter: T) -> Self {
            let exterior = iter.into_iter().map(|point| point.0).collect();
            Self(geo_types::Polygon::new(exterior, Vec::default()))
        }
    }

    impl From<&[super::Point]> for Polygon {
        fn from(points: &[super::Point]) -> Self {
            Self::from_iter(points.iter().copied())
        }
    }

    impl Type<Postgres> for Polygon {
        fn type_info() -> <Postgres as Database>::TypeInfo {
            PgTypeInfo::with_name("geometry")
        }
    }

    impl<'q> Decode<'q, Postgres> for Polygon {
        fn decode(
            value: <Postgres as Database>::ValueRef<'q>,
        ) -> Result<Self, sqlx::error::BoxDynError> {
            let decoded = geozero::wkb::Decode::<geo_types::Geometry<f64>>::decode(value)?;

            match decoded.geometry {
                Some(geo_types::Geometry::Polygon(p)) => Ok(p.into()),
                Some(other) => Err(Box::new(DecodeErr::WrongType {
                    expected: "polygon",
                    decoded: other,
                })),
                None => Err(Box::new(DecodeErr::UnexpectedNull)),
            }
        }
    }

    impl<'r> Encode<'r, Postgres> for Polygon {
        fn encode(
            self,
            buf: &mut <Postgres as Database>::ArgumentBuffer<'r>,
        ) -> Result<sqlx::encode::IsNull, sqlx::error::BoxDynError>
        where
            Self: Sized,
        {
            let geometry = geo_types::Geometry::Polygon(self.0);
            geozero::wkb::Encode(geometry).encode(buf)
        }

        fn encode_by_ref(
            &self,
            buf: &mut <Postgres as Database>::ArgumentBuffer<'r>,
        ) -> Result<sqlx::encode::IsNull, sqlx::error::BoxDynError> {
            // `geozero::wkb::Encode` consumes its geometry, so encoding by
            // reference requires a clone of the wrapped polygon.
            self.clone().encode(buf)
        }
    }

    #[cfg(feature = "serde")]
    mod serde {
        /// Intermediate representation: a polygon is (de)serialized as the
        /// list of points forming its exterior ring.
        #[derive(serde::Serialize, serde::Deserialize)]
        struct InternalPolygon(Vec<super::Point>);

        impl serde::Serialize for super::Polygon {
            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
            where
                S: serde::Serializer,
            {
                let exterior = self.0.exterior();

                let mut points = Vec::with_capacity(exterior.0.len());

                for coord in exterior {
                    let point = geo_types::Point(*coord);
                    points.push(super::Point(point));
                }

                InternalPolygon(points).serialize(serializer)
            }
        }

        impl<'de> serde::Deserialize<'de> for super::Polygon {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                let InternalPolygon(points) = InternalPolygon::deserialize(deserializer)?;
                let coords = points.into_iter().map(|point| point.0.0).collect();
                let exterior = geo_types::LineString::new(coords);
                let polygon = geo_types::Polygon::new(exterior, Vec::default());

                Ok(Self(polygon))
            }
        }

        #[cfg(test)]
        mod tests {
            use crate::postgis::Point;

            use super::super::Polygon;

            #[test]
            fn serialize_deserialize() {
                let polygon = Polygon::from_iter([
                    Point::new(0., 0.),
                    Point::new(1., 0.),
                    Point::new(0., 1.),
                    Point::new(1., 1.),
                ]);

                let serialized = serde_json::to_string(&polygon).unwrap();
                // NOTE: geo_types closes the exterior ring, hence the repeated
                // first point at the end of the serialized list.
                assert_eq!(
                    serialized,
                    r#"[{"x":0.0,"y":0.0},{"x":1.0,"y":0.0},{"x":0.0,"y":1.0},{"x":1.0,"y":1.0},{"x":0.0,"y":0.0}]"#
                );

                let deserialized = serde_json::from_str(&serialized).unwrap();
                assert_eq!(polygon, deserialized);
            }
        }
    }
}

pub use point::*;
pub use polygon::*;
-------------------------------------------------------------------------------- /atmosphere-core/src/schema/mod.rs: --------------------------------------------------------------------------------
//! SQL Schema
//!
//! This module provides the foundational building blocks for defining the structure and
//! relationships of SQL tables in atmosphere. It includes traits and types that describe table
structures, column details, and primary and foreign key relationships. This is essential 6 | //! for representing and manipulating database schema in a type-safe and Rust-idiomatic way. 7 | 8 | use sqlx::{Database, Encode, FromRow, Type}; 9 | 10 | mod create; 11 | mod delete; 12 | mod read; 13 | mod update; 14 | 15 | pub use create::Create; 16 | pub use delete::Delete; 17 | pub use read::Read; 18 | pub use update::Update; 19 | 20 | pub use self::column::{Column, DataColumn, ForeignKey, PrimaryKey, TimestampColumn}; 21 | 22 | /// SQL Table Definition 23 | /// 24 | /// Represents the definition of a SQL table within the framework, encompassing primary keys, 25 | /// foreign keys, data columns, and timestamp columns. This trait should be implemented by structs 26 | /// that represent database tables, providing metadata and utility functions for table manipulation 27 | /// and query building. 28 | pub trait Table 29 | where 30 | Self: Sized + Send + for<'r> FromRow<'r, ::Row> + 'static, 31 | Self::PrimaryKey: for<'q> Encode<'q, crate::Driver> + Type + Send, 32 | { 33 | /// The type of the primary key for the table. 34 | type PrimaryKey: Sync + Sized + 'static; 35 | 36 | /// The database schema in which the table resides. 37 | const SCHEMA: &'static str; 38 | /// The name of the table. 39 | const TABLE: &'static str; 40 | 41 | /// The primary key column of the table. 42 | const PRIMARY_KEY: PrimaryKey; 43 | /// An array of foreign key columns. 44 | const FOREIGN_KEYS: &'static [ForeignKey]; 45 | /// An array of data columns. 46 | const DATA_COLUMNS: &'static [DataColumn]; 47 | /// An array of timestamp columns. 48 | const TIMESTAMP_COLUMNS: &'static [TimestampColumn]; 49 | 50 | /// Returns a reference to the primary key of the table instance. 51 | fn pk(&self) -> &Self::PrimaryKey; 52 | } 53 | 54 | /// Trait representing an Entity that maps to a database table. 
55 | /// 56 | /// Entities are table representations that implement CRUD (Create, Read, Update, Delete) 57 | /// operations. This trait is automatically implemented for any type that satisfies the `Create`, 58 | /// `Read`, `Update`, and `Delete` trait requirements, tying together the core functionalities 59 | /// needed for database interaction in the framework. 60 | pub trait Entity: Create + Read + Update + Delete {} 61 | 62 | impl Entity for E {} 63 | 64 | /// Column types representing various aspects of table columns. 65 | /// 66 | /// These types provide detailed descriptions of table columns, their roles, and their SQL 67 | /// representations. They are used to define the structure of a table and guide query construction 68 | /// and execution within the framework. 69 | pub mod column { 70 | use crate::Table; 71 | use std::marker::PhantomData; 72 | 73 | /// An enum that encapsulates different column types of a table. 74 | #[derive(Copy, Debug, PartialEq, Eq)] 75 | pub enum Column { 76 | /// A primary key 77 | PrimaryKey(&'static PrimaryKey), 78 | /// A foreign key 79 | ForeignKey(&'static ForeignKey), 80 | /// A data column 81 | Data(&'static DataColumn), 82 | /// A timestamp column 83 | Timestamp(&'static TimestampColumn), 84 | } 85 | 86 | impl Clone for Column { 87 | fn clone(&self) -> Self { 88 | match self { 89 | Self::PrimaryKey(pk) => Self::PrimaryKey(*pk), 90 | Self::ForeignKey(fk) => Self::ForeignKey(*fk), 91 | Self::Data(data) => Self::Data(*data), 92 | Self::Timestamp(ts) => Self::Timestamp(*ts), 93 | } 94 | } 95 | } 96 | 97 | impl Column { 98 | pub const fn field(&self) -> &'static str { 99 | match self { 100 | Self::PrimaryKey(pk) => pk.field, 101 | Self::ForeignKey(fk) => fk.field, 102 | Self::Data(data) => data.field, 103 | Self::Timestamp(ts) => ts.field, 104 | } 105 | } 106 | 107 | pub const fn sql(&self) -> &'static str { 108 | match self { 109 | Self::PrimaryKey(pk) => pk.sql, 110 | Self::ForeignKey(fk) => fk.sql, 111 | Self::Data(data) => 
data.sql, 112 | Self::Timestamp(ts) => ts.sql, 113 | } 114 | } 115 | } 116 | 117 | /// Describes the primary key column of a table. 118 | #[derive(Copy, Debug, PartialEq, Eq)] 119 | pub struct PrimaryKey { 120 | pub field: &'static str, 121 | pub sql: &'static str, 122 | table: PhantomData, 123 | } 124 | 125 | impl PrimaryKey { 126 | pub const fn new(field: &'static str, sql: &'static str) -> Self { 127 | Self { 128 | field, 129 | sql, 130 | table: PhantomData, 131 | } 132 | } 133 | 134 | pub const fn as_col(&'static self) -> Column { 135 | Column::PrimaryKey(self) 136 | } 137 | } 138 | 139 | impl Clone for PrimaryKey { 140 | fn clone(&self) -> Self { 141 | Self { 142 | field: self.field, 143 | sql: self.sql, 144 | table: PhantomData, 145 | } 146 | } 147 | } 148 | 149 | /// Represents a foreign key column, establishing a relationship to another table. 150 | #[derive(Copy, Debug, PartialEq, Eq)] 151 | pub struct ForeignKey { 152 | /// The rust field name of the model 153 | pub field: &'static str, 154 | /// The associated sql column name 155 | pub sql: &'static str, 156 | table: PhantomData, 157 | } 158 | 159 | impl ForeignKey { 160 | pub const fn new(field: &'static str, sql: &'static str) -> Self { 161 | Self { 162 | field, 163 | sql, 164 | table: PhantomData, 165 | } 166 | } 167 | 168 | pub const fn as_col(&'static self) -> Column { 169 | Column::ForeignKey(self) 170 | } 171 | 172 | /// # Safety 173 | /// We do treat this foreign key as a column of another table. This is not smart to do - but 174 | /// can become necessary when doing complex joins. This is memory safe as `Self` and 175 | /// `Self` have the exact same memory layout, we do not store any data (A or B) but only 176 | /// a `PhantomData` instance which is here transmuted. 
177 | pub const unsafe fn transmute(&'static self) -> &'static ForeignKey { 178 | unsafe { std::mem::transmute(self) } 179 | } 180 | } 181 | 182 | impl Clone for ForeignKey { 183 | fn clone(&self) -> Self { 184 | Self { 185 | field: self.field, 186 | sql: self.sql, 187 | table: PhantomData, 188 | } 189 | } 190 | } 191 | 192 | /// Defines a standard data column in the table. 193 | #[derive(Copy, Debug, PartialEq, Eq)] 194 | pub struct DataColumn { 195 | /// The rust field name of the model 196 | pub field: &'static str, 197 | /// The associated sql column name 198 | pub sql: &'static str, 199 | table: PhantomData, 200 | } 201 | 202 | impl DataColumn { 203 | pub const fn new(field: &'static str, sql: &'static str) -> Self { 204 | Self { 205 | field, 206 | sql, 207 | table: PhantomData, 208 | } 209 | } 210 | 211 | pub const fn as_col(&'static self) -> Column { 212 | Column::Data(self) 213 | } 214 | } 215 | 216 | impl Clone for DataColumn { 217 | fn clone(&self) -> Self { 218 | Self { 219 | field: self.field, 220 | sql: self.sql, 221 | table: PhantomData, 222 | } 223 | } 224 | } 225 | 226 | /// The type of a timestamp column 227 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] 228 | pub enum TimestampKind { 229 | Created, 230 | Updated, 231 | Deleted, 232 | } 233 | 234 | /// Specifies a timestamp column, typically used for tracking creation, update, or deletion times. 
235 | #[derive(Copy, Debug, PartialEq, Eq)] 236 | pub struct TimestampColumn { 237 | /// The type of this timestamp column 238 | pub kind: TimestampKind, 239 | /// The rust field name of the model 240 | pub field: &'static str, 241 | /// The associated sql column name 242 | pub sql: &'static str, 243 | table: PhantomData, 244 | } 245 | 246 | impl TimestampColumn { 247 | pub const fn new(kind: TimestampKind, field: &'static str, sql: &'static str) -> Self { 248 | Self { 249 | kind, 250 | field, 251 | sql, 252 | table: PhantomData, 253 | } 254 | } 255 | } 256 | 257 | impl Clone for TimestampColumn { 258 | fn clone(&self) -> Self { 259 | Self { 260 | kind: self.kind, 261 | field: self.field, 262 | sql: self.sql, 263 | table: PhantomData, 264 | } 265 | } 266 | } 267 | } 268 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /atmosphere-macros/src/schema/column.rs: -------------------------------------------------------------------------------- 1 | use std::hash::Hash; 2 | 3 | use proc_macro2::{Span, TokenStream}; 4 | use quote::{ToTokens, quote}; 5 | use syn::{Field, Ident, Type}; 6 | 7 | use super::keys::{ForeignKey, PrimaryKey}; 8 | 9 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 10 | pub struct NameSet { 11 | field: Ident, 12 | sql: Option, 13 | } 14 | 15 | impl NameSet { 16 | pub fn new(field: Ident, sql: Option) -> Self { 17 | Self { field, sql } 18 | } 19 | 20 | pub fn field(&self) -> &Ident { 21 | &self.field 22 | } 23 | 24 | pub fn sql(&self) -> &Ident { 25 | self.sql.as_ref().unwrap_or(&self.field) 26 | } 27 | } 28 | 29 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 30 | pub struct ColumnModifiers { 31 | pub unique: bool, 32 | pub json: bool, 33 | } 34 | 35 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] 36 | pub enum 
TimestampKind { 37 | Created, 38 | Updated, 39 | Deleted, 40 | } 41 | 42 | impl ToTokens for TimestampKind { 43 | fn to_tokens(&self, tokens: &mut TokenStream) { 44 | let path = match self { 45 | Self::Created => quote!(::atmosphere::column::TimestampKind::Created), 46 | Self::Updated => quote!(::atmosphere::column::TimestampKind::Updated), 47 | Self::Deleted => quote!(::atmosphere::column::TimestampKind::Deleted), 48 | }; 49 | 50 | tokens.extend(path); 51 | } 52 | } 53 | 54 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 55 | pub struct TimestampColumn { 56 | pub modifiers: ColumnModifiers, 57 | pub kind: TimestampKind, 58 | pub name: NameSet, 59 | pub ty: Type, 60 | } 61 | 62 | impl TimestampColumn { 63 | pub fn quote(&self) -> TokenStream { 64 | let kind = self.kind; 65 | let field = self.name.field().to_string(); 66 | let sql = self.name.sql().to_string(); 67 | 68 | quote!(::atmosphere::TimestampColumn::new( 69 | #kind, 70 | #field, 71 | #sql 72 | )) 73 | } 74 | } 75 | 76 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 77 | pub struct DataColumn { 78 | pub modifiers: ColumnModifiers, 79 | pub name: NameSet, 80 | pub ty: Type, 81 | } 82 | 83 | impl DataColumn { 84 | pub fn quote(&self) -> TokenStream { 85 | let field = self.name.field(); 86 | let sql = self.name.sql(); 87 | 88 | quote!(::atmosphere::DataColumn::new( 89 | stringify!(#field), 90 | stringify!(#sql) 91 | )) 92 | } 93 | } 94 | 95 | #[derive(Clone, Debug, PartialEq, Eq)] 96 | pub enum Column { 97 | PrimaryKey(PrimaryKey), 98 | ForeignKey(ForeignKey), 99 | Data(DataColumn), 100 | Timestamp(TimestampColumn), 101 | } 102 | 103 | impl Hash for Column { 104 | fn hash(&self, state: &mut H) { 105 | self.name().field().to_string().hash(state); 106 | } 107 | } 108 | 109 | impl Column { 110 | pub fn quote(&self) -> TokenStream { 111 | match self { 112 | Self::PrimaryKey(pk) => pk.quote(), 113 | Self::ForeignKey(fk) => fk.quote(), 114 | Self::Data(data) => data.quote(), 115 | Self::Timestamp(time) => time.quote(), 
116 | } 117 | } 118 | 119 | pub fn ty(&self) -> &syn::Type { 120 | match self { 121 | Self::PrimaryKey(pk) => &pk.ty, 122 | Self::ForeignKey(fk) => &fk.ty, 123 | Self::Data(data) => &data.ty, 124 | Self::Timestamp(ts) => &ts.ty, 125 | } 126 | } 127 | } 128 | 129 | pub mod attribute { 130 | use syn::{Error, Ident, LitStr, Token, parse::Parse}; 131 | 132 | use super::{ColumnModifiers, TimestampKind}; 133 | 134 | pub const PATH: &str = "sql"; 135 | 136 | const PRIMARY_KEY: &str = "pk"; 137 | const FOREIGN_KEY: &str = "fk"; 138 | const UNIQUE: &str = "unique"; 139 | const JSON: &str = "json"; 140 | const TIMESTAMP: &str = "timestamp"; 141 | 142 | const TIMESTAMP_CREATED: &str = "created"; 143 | const TIMESTAMP_UPDATED: &str = "updated"; 144 | const TIMESTAMP_DELETED: &str = "deleted"; 145 | 146 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 147 | pub enum ColumnKind { 148 | PrimaryKey, 149 | ForeignKey { on: Ident }, 150 | Data, 151 | Timestamp { kind: TimestampKind }, 152 | } 153 | 154 | impl Parse for ColumnKind { 155 | fn parse(input: syn::parse::ParseStream) -> syn::Result { 156 | let mut kind = ColumnKind::Data; 157 | 158 | if let Some((id, _)) = input.cursor().ident() { 159 | match id.to_string().as_str() { 160 | PRIMARY_KEY => { 161 | let _: Ident = input.parse()?; 162 | 163 | kind = ColumnKind::PrimaryKey; 164 | } 165 | FOREIGN_KEY => { 166 | let _: Ident = input.parse()?; 167 | 168 | input.parse::()?; 169 | input.parse::]>()?; 170 | 171 | let on = input.parse()?; 172 | 173 | kind = ColumnKind::ForeignKey { on } 174 | } 175 | TIMESTAMP => { 176 | let _: Ident = input.parse()?; 177 | 178 | input.parse::()?; 179 | 180 | let ty: Ident = input.parse()?; 181 | 182 | let ty = match ty.to_string().as_ref() { 183 | TIMESTAMP_CREATED => TimestampKind::Created, 184 | TIMESTAMP_UPDATED => TimestampKind::Updated, 185 | TIMESTAMP_DELETED => TimestampKind::Deleted, 186 | _ => { 187 | return Err(syn::Error::new_spanned( 188 | ty, 189 | "`#[sql(timestamp = )]` only supports 
`created`. `updated` and `deleted`", 190 | )); 191 | } 192 | }; 193 | 194 | kind = ColumnKind::Timestamp { kind: ty } 195 | } 196 | _ => {} 197 | }; 198 | 199 | if kind != ColumnKind::Data && input.peek(Token![,]) { 200 | input.parse::()?; 201 | } 202 | } 203 | 204 | Ok(kind) 205 | } 206 | } 207 | 208 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 209 | pub struct Attribute { 210 | pub kind: ColumnKind, 211 | pub modifiers: ColumnModifiers, 212 | pub renamed: Option, 213 | } 214 | 215 | impl Parse for Attribute { 216 | fn parse(input: syn::parse::ParseStream) -> syn::Result { 217 | let kind: ColumnKind = input.parse()?; 218 | 219 | let mut modifiers = ColumnModifiers { 220 | unique: false, 221 | json: false, 222 | }; 223 | let mut renamed = None; 224 | 225 | while !input.is_empty() { 226 | let ident: syn::Ident = input.parse()?; 227 | 228 | { 229 | let span = ident.span(); 230 | let ident = ident.to_string(); 231 | 232 | // we found a tag 233 | if ident == UNIQUE { 234 | if modifiers.unique { 235 | return Err(Error::new(span, "found redundant `unique` modifier")); 236 | } 237 | 238 | modifiers.unique = true; 239 | 240 | if !input.peek(Token![,]) { 241 | break; 242 | } 243 | 244 | input.parse::()?; 245 | 246 | continue; 247 | } else if ident == JSON { 248 | if modifiers.json { 249 | return Err(Error::new(span, "found redundant `json` modifier")); 250 | } 251 | 252 | modifiers.json = true; 253 | 254 | if !input.peek(Token![,]) { 255 | break; 256 | } 257 | 258 | input.parse::()?; 259 | 260 | continue; 261 | } 262 | } 263 | 264 | // we found a kv pair 265 | input.parse::()?; 266 | let value: LitStr = input.parse()?; 267 | 268 | match ident.to_string().as_str() { 269 | "rename" => renamed = Some(Ident::new(&value.value(), value.span())), 270 | _ => return Err(syn::Error::new_spanned(ident, "")), 271 | } 272 | 273 | if !input.peek(Token![,]) { 274 | break; 275 | } 276 | 277 | input.parse::()?; 278 | } 279 | 280 | Ok(Self { 281 | kind, 282 | modifiers, 283 | renamed, 284 
| }) 285 | } 286 | } 287 | } 288 | 289 | impl TryFrom for Column { 290 | type Error = syn::Error; 291 | 292 | fn try_from(field: Field) -> syn::Result { 293 | let name = field.ident.ok_or(syn::Error::new( 294 | Span::call_site(), 295 | "only named fields are supported", 296 | ))?; 297 | 298 | let ty = field.ty; 299 | 300 | let attribute = field 301 | .attrs 302 | .iter() 303 | .find(|a| a.path().is_ident(attribute::PATH)); 304 | 305 | let Some(attribute) = attribute else { 306 | return Ok(Self::Data(DataColumn { 307 | modifiers: ColumnModifiers { 308 | unique: false, 309 | json: false, 310 | }, 311 | name: NameSet::new(name, None), 312 | ty, 313 | })); 314 | }; 315 | 316 | let attribute: attribute::Attribute = attribute.parse_args()?; 317 | 318 | let modifiers = attribute.modifiers; 319 | let name = NameSet::new(name, attribute.renamed); 320 | 321 | match attribute.kind { 322 | attribute::ColumnKind::PrimaryKey => Ok(Self::PrimaryKey(PrimaryKey { 323 | modifiers: ColumnModifiers { 324 | unique: true, 325 | json: false, 326 | }, 327 | name, 328 | ty, 329 | })), 330 | attribute::ColumnKind::ForeignKey { on } => Ok(Self::ForeignKey(ForeignKey { 331 | on, 332 | modifiers, 333 | name, 334 | ty, 335 | })), 336 | attribute::ColumnKind::Data => Ok(Self::Data(DataColumn { 337 | modifiers, 338 | name, 339 | ty, 340 | })), 341 | attribute::ColumnKind::Timestamp { kind } => Ok(Self::Timestamp(TimestampColumn { 342 | modifiers, 343 | kind, 344 | name, 345 | ty, 346 | })), 347 | } 348 | } 349 | } 350 | 351 | impl Column { 352 | pub fn name(&self) -> &NameSet { 353 | match self { 354 | Self::PrimaryKey(pk) => &pk.name, 355 | Self::ForeignKey(fk) => &fk.name, 356 | Self::Data(data) => &data.name, 357 | Self::Timestamp(ts) => &ts.name, 358 | } 359 | } 360 | } 361 | 362 | /// Utility implementations for determining the enum type 363 | impl Column { 364 | pub const fn as_primary_key(&self) -> Option<&PrimaryKey> { 365 | match self { 366 | Self::PrimaryKey(pk) => Some(pk), 367 | _ => 
None, 368 | } 369 | } 370 | /* Each as_* accessor returns Some only when self is the matching variant; all are const and borrow the payload. */ 371 | pub const fn as_foreign_key(&self) -> Option<&ForeignKey> { 372 | match self { 373 | Self::ForeignKey(fk) => Some(fk), 374 | _ => None, 375 | } 376 | } 377 | 378 | pub const fn as_data_column(&self) -> Option<&DataColumn> { 379 | match self { 380 | Self::Data(c) => Some(c), 381 | _ => None, 382 | } 383 | } 384 | 385 | pub const fn as_timestamp_column(&self) -> Option<&TimestampColumn> { 386 | match self { 387 | Self::Timestamp(c) => Some(c), 388 | _ => None, 389 | } 390 | } 391 | } 392 | -------------------------------------------------------------------------------- /atmosphere-core/src/runtime/sql.rs: -------------------------------------------------------------------------------- 1 | //! # SQL Code Generation 2 | //! 3 | //! This submodule provides essential constructors for SQL queries tailored to the operations 4 | //! performed within the Atmosphere framework. It includes functionalities for dynamically building 5 | //! queries for CRUD (Create, Read, Update, Delete) operations, and managing bindings between SQL 6 | //! queries and table entities. 7 | //! 8 | //! ## Key features: 9 | //! 10 | //! - Query Builders: Functions like `select`, `insert`, `update`, `delete`, and `upsert`, which create SQL 11 | //! queries for their respective operations. These builders ensure that queries are correctly formatted and 12 | //! aligned with the structure and constraints of the target table. 13 | //! 14 | //! - Binding Management: The `Bindings` struct and its implementations, which manage the relationship between 15 | //! table columns and the SQL queries they are bound to. This ensures that queries are executed with the correct 16 | //! parameters and their values. 17 | 18 | use std::fmt; 19 | 20 | use sqlx::QueryBuilder; 21 | 22 | use crate::{ 23 | Bind, Column, 24 | query::{self, Query}, 25 | }; 26 | 27 | /// Struct representing bindings for SQL queries.
28 | /// 29 | /// `Bindings` is responsible for holding a collection of columns that are bound to a specific SQL query. 30 | /// It encapsulates the necessary details for each column, such as field names and SQL representations, 31 | /// ensuring accurate and efficient binding of data to the query. 32 | pub struct Bindings(Vec>); 33 | /* Structural equality: binding sets match when they have the same length and agree pairwise on both the Rust field name and the SQL column name. */ 34 | impl PartialEq for Bindings { 35 | fn eq(&self, other: &Self) -> bool { 36 | if self.0.len() != other.0.len() { 37 | return false; 38 | } 39 | 40 | for (i, a) in self.0.iter().enumerate() { 41 | let Some(b) = other.0.get(i) else { 42 | return false; 43 | }; 44 | 45 | if a.field() != b.field() { 46 | return false; 47 | } 48 | 49 | if a.sql() != b.sql() { 50 | return false; 51 | } 52 | } 53 | 54 | true 55 | } 56 | } 57 | 58 | impl Eq for Bindings {} 59 | /* Debug renders only the field names, e.g. Bindings("id", "fk"). */ 60 | impl fmt::Debug for Bindings { 61 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 62 | let mut f = f.debug_tuple("Bindings"); 63 | 64 | for c in &self.0 { 65 | f.field(&c.field()); 66 | } 67 | 68 | f.finish() 69 | } 70 | } 71 | 72 | impl Bindings { 73 | pub fn columns(&self) -> &[Column] { 74 | &self.0 75 | } 76 | 77 | pub fn empty() -> Self { 78 | Self(vec![]) 79 | } 80 | } 81 | /* Fully-qualified, quoted table name; SQLite has no schema support, hence the feature-gated single-identifier variant. */ 82 | fn table() -> String { 83 | #[cfg(not(feature = "sqlite"))] 84 | return format!("\"{}\".\"{}\"", T::SCHEMA, T::TABLE); 85 | 86 | #[cfg(feature = "sqlite")] 87 | return format!("\"{}\"", T::TABLE); 88 | } 89 | 90 | /// Generates a `SELECT` query to retrieve a single row from the table based on its primary key. 91 | /// 92 | /// SQL: `SELECT * FROM .. WHERE .. = $1` 93 | pub fn select() -> Query { 94 | select_by(Column::PrimaryKey(&T::PRIMARY_KEY)) 95 | } 96 | 97 | /// Creates a `SELECT` query to retrieve rows from the table based on a specific column. 98 | /// 99 | /// SQL: `SELECT * FROM .. WHERE ..
= $1` 100 | pub fn select_by(c: Column) -> Query { 101 | let mut query = QueryBuilder::new("SELECT\n "); 102 | 103 | let mut separated = query.separated(",\n "); 104 | /* column order is fixed throughout this module: pk, then foreign keys, data columns, timestamps */ 105 | separated.push(T::PRIMARY_KEY.sql); 106 | 107 | for fk in T::FOREIGN_KEYS { 108 | separated.push(fk.sql); 109 | } 110 | 111 | for data in T::DATA_COLUMNS { 112 | separated.push(data.sql); 113 | } 114 | 115 | for meta in T::TIMESTAMP_COLUMNS { 116 | separated.push(meta.sql); 117 | } 118 | 119 | query.push(format!("\nFROM\n {}\n", table::())); 120 | query.push(format!("WHERE {} = $1", c.sql())); 121 | 122 | Query::new( 123 | query::Operation::Select, 124 | query::Cardinality::One, 125 | query, 126 | Bindings(vec![c]), 127 | ) 128 | } 129 | 130 | /// Constructs a `SELECT` query to fetch all rows from the table. 131 | /// 132 | /// SQL: `SELECT * FROM ..` 133 | pub fn select_all() -> Query { 134 | let mut query = QueryBuilder::new("SELECT\n "); 135 | 136 | let mut separated = query.separated(",\n "); 137 | 138 | separated.push(T::PRIMARY_KEY.sql); 139 | 140 | for fk in T::FOREIGN_KEYS { 141 | separated.push(fk.sql); 142 | } 143 | 144 | for data in T::DATA_COLUMNS { 145 | separated.push(data.sql); 146 | } 147 | 148 | for meta in T::TIMESTAMP_COLUMNS { 149 | separated.push(meta.sql); 150 | } 151 | 152 | query.push(format!("\nFROM\n {}\n", table::())); 153 | 154 | Query::new( 155 | query::Operation::Select, 156 | query::Cardinality::Many, 157 | query, 158 | Bindings::empty(), 159 | ) 160 | } 161 | 162 | /// Generates an `INSERT` query to add a new row to the table. 163 | /// 164 | /// SQL: `INSERT INTO ..
VALUES ..` 165 | pub fn insert() -> Query { 166 | let mut builder = QueryBuilder::new(format!("INSERT INTO {}\n (", table::())); 167 | 168 | let mut bindings = vec![]; 169 | 170 | let mut separated = builder.separated(", "); 171 | 172 | separated.push(T::PRIMARY_KEY.sql.to_string()); 173 | bindings.push(Column::PrimaryKey(&T::PRIMARY_KEY)); 174 | 175 | for fk in T::FOREIGN_KEYS { 176 | separated.push(fk.sql.to_string()); 177 | bindings.push(Column::ForeignKey(fk)); 178 | } 179 | 180 | for data in T::DATA_COLUMNS { 181 | separated.push(data.sql.to_string()); 182 | bindings.push(Column::Data(data)); 183 | } 184 | 185 | for meta in T::TIMESTAMP_COLUMNS { 186 | separated.push(meta.sql.to_string()); 187 | bindings.push(Column::Timestamp(meta)); 188 | } 189 | 190 | separated.push_unseparated(")\nVALUES\n (") ; 191 | 192 | separated.push_unseparated("$1"); 193 | /* $1 was emitted above for the pk; the remaining columns get $2..=$N, N = total column count */ 194 | let columns = 1 + T::FOREIGN_KEYS.len() + T::DATA_COLUMNS.len() + T::TIMESTAMP_COLUMNS.len(); 195 | 196 | for c in 2..=columns { 197 | separated.push(format!("${c}")); 198 | } 199 | 200 | builder.push(")"); 201 | 202 | Query::new( 203 | query::Operation::Insert, 204 | query::Cardinality::One, 205 | builder, 206 | Bindings(bindings), 207 | ) 208 | } 209 | 210 | /// Creates an `UPDATE` query to modify an existing row in the table. 211 | /// 212 | /// SQL: `UPDATE .. SET ..
WHERE ..` 213 | pub fn update() -> Query { 214 | let mut builder = QueryBuilder::new(format!("UPDATE {} SET\n ", table::())); 215 | let mut bindings = vec![]; 216 | 217 | let mut separated = builder.separated(",\n "); 218 | /* NOTE(review): the pk is assigned to itself via $1 and the same $1 filters the WHERE clause below, so the pk is bound exactly once — deliberate, see the matching test */ 219 | separated.push(format!("{} = $1", T::PRIMARY_KEY.sql)); 220 | bindings.push(Column::PrimaryKey(&T::PRIMARY_KEY)); 221 | 222 | let mut col = 2; 223 | 224 | for fk in T::FOREIGN_KEYS { 225 | separated.push(format!("{} = ${col}", fk.sql)); 226 | bindings.push(Column::ForeignKey(fk)); 227 | col += 1; 228 | } 229 | 230 | for data in T::DATA_COLUMNS { 231 | separated.push(format!("{} = ${col}", data.sql)); 232 | bindings.push(Column::Data(data)); 233 | col += 1; 234 | } 235 | 236 | for meta in T::TIMESTAMP_COLUMNS { 237 | separated.push(format!("{} = ${col}", meta.sql)); 238 | bindings.push(Column::Timestamp(meta)); 239 | col += 1; 240 | } 241 | 242 | builder.push(format!("\nWHERE\n {} = $1", T::PRIMARY_KEY.sql)); 243 | 244 | Query::new( 245 | query::Operation::Update, 246 | query::Cardinality::One, 247 | builder, 248 | Bindings(bindings), 249 | ) 250 | } 251 | 252 | /// Constructs an `UPSERT` query (update or insert) for a row in the table. 253 | /// 254 | /// SQL: `UPDATE .. SET .. WHERE .. ON CONFLICT .. DO UPDATE SET` 255 | pub fn upsert() -> Query { 256 | let Query { 257 | mut builder, 258 | bindings, 259 | ..
260 | } = insert::(); /* upsert reuses insert's SQL and bindings verbatim, then appends the conflict clause; the pk is intentionally absent from DO UPDATE SET below */ 261 | 262 | builder.push("\nON CONFLICT("); 263 | builder.push(T::PRIMARY_KEY.sql); 264 | builder.push(")\nDO UPDATE SET\n "); 265 | 266 | let mut separated = builder.separated(",\n "); 267 | 268 | for fk in T::FOREIGN_KEYS { 269 | separated.push(format!("{} = EXCLUDED.{}", fk.sql, fk.sql)); 270 | } 271 | 272 | for data in T::DATA_COLUMNS { 273 | separated.push(format!("{} = EXCLUDED.{}", data.sql, data.sql)); 274 | } 275 | 276 | for meta in T::TIMESTAMP_COLUMNS { 277 | separated.push(format!("{} = EXCLUDED.{}", meta.sql, meta.sql)); 278 | } 279 | 280 | Query::new( 281 | query::Operation::Upsert, 282 | query::Cardinality::One, 283 | builder, 284 | bindings, 285 | ) 286 | } 287 | 288 | /// Generates a `DELETE` query to remove a row from the table based on its primary key. 289 | /// 290 | /// SQL: `DELETE FROM .. WHERE ..` 291 | pub fn delete() -> Query { 292 | delete_by(T::PRIMARY_KEY.as_col()) 293 | } 294 | 295 | /// Creates a `DELETE` query to remove rows from the table based on a specific column. 296 | /// 297 | /// SQL: `DELETE FROM ..
WHERE ..` 298 | pub fn delete_by(c: Column) -> Query { 299 | let mut builder = QueryBuilder::new(format!("DELETE FROM {} WHERE ", table::())); 300 | 301 | builder.push(c.sql()); 302 | builder.push(" = $1"); 303 | 304 | Query::new( 305 | query::Operation::Delete, 306 | query::Cardinality::One, 307 | builder, /* fix: bind the column actually used in the WHERE clause (mirrors select_by); previously this always bound the primary key, so delete_by on any other column bound the wrong value at execution time */ 308 | Bindings(vec![c]), 309 | ) 310 | } 311 | 312 | #[cfg(test)] 313 | mod tests { 314 | use crate::{ 315 | Bind, Bindable, Column, DataColumn, ForeignKey, PrimaryKey, Table, TimestampColumn, 316 | runtime::sql::{self, Bindings}, 317 | }; 318 | 319 | #[derive(sqlx::FromRow)] 320 | #[allow(unused)] 321 | struct TestTable { 322 | id: i32, 323 | fk: i32, 324 | data: bool, 325 | } 326 | 327 | impl Table for TestTable { 328 | type PrimaryKey = i32; 329 | 330 | const SCHEMA: &'static str = "public"; 331 | const TABLE: &'static str = "test"; 332 | 333 | const PRIMARY_KEY: PrimaryKey = PrimaryKey::new("id", "id_sql_col"); 334 | const FOREIGN_KEYS: &'static [ForeignKey] = &[ForeignKey::new("fk", "fk_sql_col")]; 335 | const DATA_COLUMNS: &'static [DataColumn] = 336 | &[DataColumn::new("data", "data_sql_col")]; 337 | const TIMESTAMP_COLUMNS: &'static [TimestampColumn] = &[]; 338 | 339 | fn pk(&self) -> &Self::PrimaryKey { 340 | &self.id 341 | } 342 | } 343 | 344 | impl Bind for TestTable { 345 | fn bind<'q, Q: Bindable<'q>>(&'q self, c: &'q Column, query: Q) -> crate::Result { 346 | match c.field() { 347 | "id" => Ok(query.dyn_bind(self.id)), 348 | "fk" => Ok(query.dyn_bind(self.fk)), 349 | "data" => Ok(query.dyn_bind(self.data)), 350 | _ => unimplemented!(), 351 | } 352 | } 353 | } 354 | 355 | #[test] 356 | fn select() { 357 | let sql::Query { 358 | builder, bindings, ..
359 | } = sql::select::(); 360 | 361 | assert_eq!( 362 | builder.sql(), 363 | "SELECT\n  id_sql_col,\n  fk_sql_col,\n  data_sql_col\nFROM\n  \"public\".\"test\"\nWHERE id_sql_col = $1" 364 | ); 365 | 366 | assert_eq!( 367 | bindings, 368 | Bindings(vec![Column::PrimaryKey(&TestTable::PRIMARY_KEY),]) 369 | ); 370 | } 371 | 372 | #[test] 373 | fn insert() { 374 | let sql::Query { 375 | builder, bindings, .. 376 | } = sql::insert::(); 377 | 378 | assert_eq!( 379 | builder.sql(), 380 | "INSERT INTO \"public\".\"test\"\n  (id_sql_col, fk_sql_col, data_sql_col)\nVALUES\n  ($1, $2, $3)" 381 | ); 382 | 383 | assert_eq!( 384 | bindings, 385 | Bindings(vec![ 386 | Column::PrimaryKey(&TestTable::PRIMARY_KEY), 387 | Column::ForeignKey(&TestTable::FOREIGN_KEYS[0]), 388 | Column::Data(&TestTable::DATA_COLUMNS[0]), 389 | ]) 390 | ); 391 | } 392 | 393 | #[test] 394 | fn update() { 395 | let sql::Query { 396 | builder, bindings, .. 397 | } = sql::update::(); 398 | 399 | assert_eq!( 400 | builder.sql(), 401 | "UPDATE \"public\".\"test\" SET\n  id_sql_col = $1,\n  fk_sql_col = $2,\n  data_sql_col = $3\nWHERE\n  id_sql_col = $1" 402 | ); 403 | 404 | assert_eq!( 405 | bindings, 406 | Bindings(vec![ 407 | Column::PrimaryKey(&TestTable::PRIMARY_KEY), 408 | Column::ForeignKey(&TestTable::FOREIGN_KEYS[0]), 409 | Column::Data(&TestTable::DATA_COLUMNS[0]), 410 | ]) 411 | ); 412 | } 413 | 414 | #[test] 415 | fn upsert() { 416 | let sql::Query { 417 | builder, bindings, ..
418 | } = sql::upsert::(); 419 | 420 | assert_eq!( 421 | builder.sql(), 422 | "INSERT INTO \"public\".\"test\"\n  (id_sql_col, fk_sql_col, data_sql_col)\nVALUES\n  ($1, $2, $3)\nON CONFLICT(id_sql_col)\nDO UPDATE SET\n  fk_sql_col = EXCLUDED.fk_sql_col,\n  data_sql_col = EXCLUDED.data_sql_col" 423 | ); 424 | 425 | assert_eq!( 426 | bindings, 427 | Bindings(vec![ 428 | Column::PrimaryKey(&TestTable::PRIMARY_KEY), 429 | Column::ForeignKey(&TestTable::FOREIGN_KEYS[0]), 430 | Column::Data(&TestTable::DATA_COLUMNS[0]), 431 | ]) 432 | ); 433 | } 434 | 435 | #[test] 436 | fn delete() { 437 | let sql::Query { 438 | builder, bindings, .. 439 | } = sql::delete::(); 440 | 441 | assert_eq!( 442 | builder.sql(), 443 | "DELETE FROM \"public\".\"test\" WHERE id_sql_col = $1" 444 | ); 445 | assert_eq!( 446 | bindings, 447 | Bindings(vec![Column::PrimaryKey(&TestTable::PRIMARY_KEY),]) 448 | ); 449 | } 450 | } 451 | --------------------------------------------------------------------------------