├── .github ├── package └── workflows │ ├── release.yaml │ └── test.yaml ├── .gitignore ├── .rustfmt.toml ├── Cargo.toml ├── Justfile ├── LICENSE.md ├── README.md ├── attr ├── Cargo.toml ├── Justfile └── src │ ├── cfg_attr.rs │ ├── derive.rs │ ├── error.rs │ ├── ext.rs │ ├── ident.rs │ ├── lib.rs │ ├── metadata │ ├── column.rs │ ├── insert.rs │ ├── mod.rs │ ├── model.rs │ └── table.rs │ ├── repr.rs │ └── ttype.rs ├── cli ├── Cargo.toml ├── Justfile └── src │ ├── command.rs │ ├── command │ ├── down.rs │ ├── info.rs │ ├── init.rs │ ├── migrate.rs │ └── up.rs │ ├── config.rs │ ├── main.rs │ ├── schema.rs │ └── util.rs ├── core ├── Cargo.toml ├── Justfile ├── README.md └── src │ ├── config.rs │ ├── error.rs │ ├── insert.rs │ ├── join.rs │ ├── lib.rs │ ├── model.rs │ ├── query_builder │ ├── args.rs │ ├── mod.rs │ ├── placeholder.rs │ ├── select.rs │ └── util.rs │ └── schema.rs ├── macro ├── Cargo.toml ├── Justfile ├── README.md └── src │ ├── codegen.rs │ ├── codegen │ ├── common.rs │ ├── from_row.rs │ ├── insert.rs │ ├── insert_model.rs │ ├── into_arguments.rs │ ├── join_description.rs │ ├── meta.rs │ ├── model.rs │ ├── model_builder.rs │ ├── mysql.rs │ ├── postgres.rs │ ├── select.rs │ ├── sqlite.rs │ └── update.rs │ ├── lib.rs │ └── util.rs └── ormlite ├── Cargo.toml ├── Justfile ├── README.md ├── examples └── expandable.rs ├── src ├── lib.rs └── model.rs └── tests ├── incomplete_tests └── multiple_databases │ ├── 03-many-to-many.rs │ ├── 04-one-to-many.rs │ ├── main.rs │ └── multiple-databases.rs ├── multifile ├── main.rs ├── organization.rs └── user.rs ├── postgres.rs ├── postgres ├── complex.rs └── join.rs ├── run.rs ├── setup.rs ├── simple.rs ├── sqlite.rs └── sqlite ├── 01-table-meta.rs ├── 02-update-partial.rs ├── 03-many-to-one-join.rs ├── 04-allow-clone-primary-key.rs ├── 05-keyword-column.rs └── 06-insert.rs /.github/package: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -euxo pipefail 4 | 5 | 6 | VERSION=${REF#"refs/tags/"} 7 | DIST=`pwd`/dist 8 | # This var can be modified if need be. 9 | BIN=${GITHUB_REPOSITORY##*/} 10 | 11 | echo "Packaging $BIN $VERSION for $TARGET..." 12 | 13 | test -f Cargo.lock || cargo generate-lockfile 14 | 15 | echo "All gcc" 16 | compgen -c | grep gcc 17 | 18 | echo "Building $BIN..." 19 | RUSTFLAGS="--deny warnings --codegen target-feature=+crt-static $TARGET_RUSTFLAGS" \ 20 | cargo build --bin $BIN --target $TARGET --release 21 | EXECUTABLE=target/$TARGET/release/$BIN 22 | 23 | if [[ $OS == windows-latest ]]; then 24 | EXECUTABLE=$EXECUTABLE.exe 25 | fi 26 | 27 | echo "Copying release files..." 28 | mkdir dist 29 | cp \ 30 | $EXECUTABLE \ 31 | Cargo.lock \ 32 | Cargo.toml \ 33 | LICENSE.md \ 34 | README.md \ 35 | $DIST 36 | 37 | cd $DIST 38 | echo "Creating release archive..." 
39 | case $OS in 40 | ubuntu-latest | macos-latest) 41 | ARCHIVE=$DIST/$BIN-$VERSION-$TARGET.tar.gz 42 | tar czf $ARCHIVE * 43 | echo "::set-output name=archive::$ARCHIVE" 44 | ;; 45 | windows-latest) 46 | ARCHIVE=$DIST/$BIN-$VERSION-$TARGET.zip 47 | 7z a $ARCHIVE * 48 | echo "::set-output name=archive::`pwd -W`/$BIN-$VERSION-$TARGET.zip" 49 | ;; 50 | esac 51 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: release 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | 8 | defaults: 9 | run: 10 | shell: bash 11 | 12 | jobs: 13 | all: 14 | name: All 15 | 16 | strategy: 17 | matrix: 18 | target: 19 | - aarch64-unknown-linux-musl 20 | - armv7-unknown-linux-musleabihf 21 | - x86_64-apple-darwin 22 | - x86_64-pc-windows-msvc 23 | - x86_64-unknown-linux-musl 24 | include: 25 | - target: aarch64-unknown-linux-musl 26 | os: ubuntu-latest 27 | native: false 28 | target_rustflags: '--codegen linker=aarch64-linux-gnu-gcc' 29 | - target: armv7-unknown-linux-musleabihf 30 | os: ubuntu-latest 31 | native: false 32 | target_rustflags: '--codegen linker=arm-linux-gnueabihf-gcc' 33 | - target: x86_64-apple-darwin 34 | os: macos-latest 35 | native: true 36 | target_rustflags: '' 37 | - target: x86_64-pc-windows-msvc 38 | os: windows-latest 39 | native: true 40 | target_rustflags: '' 41 | - target: x86_64-unknown-linux-musl 42 | os: ubuntu-latest 43 | native: true 44 | target_rustflags: '' 45 | 46 | runs-on: ${{matrix.os}} 47 | 48 | steps: 49 | - uses: actions/checkout@v2 50 | 51 | - name: Install Rust Toolchain Components 52 | uses: actions-rs/toolchain@v1 53 | with: 54 | override: true 55 | target: ${{ matrix.target }} 56 | toolchain: stable 57 | 58 | - uses: Swatinem/rust-cache@v1 59 | 60 | - name: Install AArch64 Toolchain 61 | if: ${{ matrix.target == 'aarch64-unknown-linux-musl' }} 62 | run: | 63 | sudo apt-get update 64 | sudo apt-get install gcc-aarch64-linux-gnu 65 | echo "TARGET_CC=aarch64-linux-gnu-gcc" >> "$GITHUB_ENV" 66 | - name: Install ARM7 Toolchain 67 | if: ${{ matrix.target == 'armv7-unknown-linux-musleabihf' }} 68 | run: | 69 | sudo apt-get update 70 | sudo apt-get install gcc-arm-linux-gnueabihf 71 | echo "TARGET_CC=arm-linux-gnueabihf-gcc" >> "$GITHUB_ENV" 72 | - name: Install musl toolchain 73 | if: ${{ matrix.target == 'x86_64-unknown-linux-musl' }} 74 | run: | 75 | sudo apt update 76 | sudo apt install musl-tools 77 | - name: Test 78 | if: matrix.native 79 | run: | 80 | env 81 | cargo test --all --target ${{ matrix.target }} 82 | 83 | - name: Package 84 | id: package 85 | env: 86 | TARGET: ${{ matrix.target }} 87 | REF: ${{ github.ref }} 88 | OS: ${{ matrix.os }} 89 | TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }} 90 | run: ./.github/package 91 | shell: bash 92 | 93 | - name: Publish Archive 94 | uses: softprops/action-gh-release@v0.1.5 95 | if: ${{ startsWith(github.ref, 'refs/tags/') }} 96 | with: 97 | draft: false 98 | files: ${{ steps.package.outputs.archive }} 99 | prerelease: ${{ contains(github.ref_name, '-') }} ## looking for a bare semver number rather than a tagged one like 1.0.0-beta.2 100 | env: 101 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 102 | 103 | - name: Publish Changelog 104 | uses: softprops/action-gh-release@v0.1.5 105 | if: >- 106 | ${{ 107 | startsWith(github.ref, 'refs/tags/') 108 | && matrix.target == 'x86_64-unknown-linux-musl' 109 | }} 110 | with: 111 | draft: false 112 | files: CHANGELOG.md 113 | 
prerelease: ${{ contains(github.ref_name, '-') }} ## looking for a bare semver number rather than a tagged one like 1.0.0-beta.2 114 | env: 115 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/test.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | 12 | defaults: 13 | run: 14 | shell: bash 15 | 16 | jobs: 17 | run-test: 18 | permissions: 19 | contents: read 20 | runs-on: ubuntu-latest 21 | 22 | steps: 23 | - uses: actions/checkout@v2 24 | - uses: actions-rs/toolchain@v1 25 | with: 26 | toolchain: stable 27 | - uses: Swatinem/rust-cache@v1 28 | - run: cargo install just 29 | - run: just test -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | expanded.rs 3 | 4 | .env* 5 | !.env.example 6 | expand.rs 7 | .cargo 8 | Cargo.lock 9 | -------------------------------------------------------------------------------- /.rustfmt.toml: -------------------------------------------------------------------------------- 1 | max_width = 120 2 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | resolver = "2" 3 | members = ["macro", "core", "ormlite", "cli"] 4 | 5 | [workspace.dependencies] 6 | anyhow = "1" 7 | futures = "0.3.31" 8 | indexmap = { version = "2.7.0", features = ["serde"] } 9 | proc-macro2 = "1.0.92" 10 | serde = { version = "1.0.217", features = ["derive"] } 11 | sqlmo = "0.23" 12 | sqlmo_sqlx = "0.23" 13 | sqlx = "0.8.2" 14 | sqlx-core = "0.8.2" 15 | syn = { version = "2.0.94", features = ["full"] } 16 | ormlite-core = { path = "core", version = "0.23" } 17 | ormlite-attr = { path = "attr", version = "0.23" } 18 | ormlite-macro = { path = "macro", version = "0.23" } 19 | ormlite = { path = "ormlite", version = "0.23" } 20 | convert_case = "0.7.1" 21 | quote = "1.0" 22 | itertools = "0.14.0" 23 | -------------------------------------------------------------------------------- /Justfile: -------------------------------------------------------------------------------- 1 | set dotenv-load 2 | set positional-arguments 3 | set export 4 | 5 | test: 6 | just attr/test 7 | just core/test 8 | just macro/test 9 | just ormlite/test 10 | just cli/build 11 | 12 | # Bump version. level=major,minor,patch 13 | version level: 14 | #!/bin/bash -euxo pipefail 15 | git diff-index --exit-code HEAD > /dev/null || ! echo You have untracked changes. Commit your changes before bumping the version. || exit 1 16 | 17 | echo $(dye -c INFO) Make sure that it builds first. 
18 | (cd ormlite && cargo build --features runtime-tokio-rustls,sqlite) 19 | 20 | cargo set-version --bump {{ level }} --workspace 21 | VERSION=$(rg -om1 "version = \"(.*)\"" --replace '$1' ormlite/Cargo.toml) 22 | git commit -am "Bump version {{level}}" 23 | git tag v$VERSION 24 | git push 25 | git push --tags 26 | 27 | patch: 28 | just version patch 29 | just publish 30 | 31 | publish: 32 | cd attr && cargo publish 33 | cd core && cargo publish --features sqlite,postgres,mysql,runtime-tokio-rustls 34 | cd macro && cargo publish --features sqlite,postgres,mysql,runtime-tokio-rustls 35 | cd ormlite && cargo publish --features sqlite,postgres,mysql 36 | cd cli && cargo publish 37 | 38 | doc: 39 | cd ormlite && RUSTDOCFLAGS="--cfg docsrs" cargo +nightly doc --all-features --open -p ormlite --no-deps 40 | 41 | install: 42 | @just cli/install 43 | 44 | postgres *ARGS: 45 | @just ormlite/postgres $ARGS 46 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | ===================== 3 | 4 | Copyright © `` `` 5 | 6 | Permission is hereby granted, free of charge, to any person 7 | obtaining a copy of this software and associated documentation 8 | files (the “Software”), to deal in the Software without 9 | restriction, including without limitation the rights to use, 10 | copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the 12 | Software is furnished to do so, subject to the following 13 | conditions: 14 | 15 | The above copyright notice and this permission notice shall be 16 | included in all copies or substantial portions of the Software. 17 | 18 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, 19 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 20 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 21 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 22 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 23 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 24 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 25 | OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /attr/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "ormlite-attr" 3 | version = "0.23.3" 4 | edition = "2021" 5 | description = "See `ormlite`." 
6 | authors = ["Kurt Wolf "] 7 | license = "MIT" 8 | repository = "https://github.com/kurtbuilds/ormlite" 9 | homepage = "https://github.com/kurtbuilds/ormlite" 10 | documentation = "https://docs.rs/ormlite" 11 | 12 | [lib] 13 | doctest = false 14 | 15 | [features] 16 | 17 | [dependencies] 18 | proc-macro2.workspace = true 19 | quote.workspace = true 20 | syn.workspace = true 21 | structmeta = "0.3.0" 22 | convert_case.workspace = true 23 | anyhow.workspace = true 24 | ignore = "0.4" 25 | sqlmo.workspace = true 26 | tracing = "0.1" 27 | sqlformat = "=0.2.6" 28 | 29 | [dev-dependencies] 30 | syn = { version = "2.0.79", features = ["full", "extra-traits"] } 31 | -------------------------------------------------------------------------------- /attr/Justfile: -------------------------------------------------------------------------------- 1 | set dotenv-load 2 | set positional-arguments 3 | 4 | run *ARGS: 5 | cargo run -- "$@" 6 | 7 | test *ARGS: 8 | cargo test -- "$@" 9 | 10 | build: 11 | cargo build 12 | 13 | install: 14 | cargo install --path . 15 | 16 | check: 17 | cargo check 18 | -------------------------------------------------------------------------------- /attr/src/cfg_attr.rs: -------------------------------------------------------------------------------- 1 | use syn::parse::{Parse, ParseStream}; 2 | use syn::punctuated::Punctuated; 3 | use syn::token::Comma; 4 | use syn::{Meta, Token}; 5 | 6 | /// When doing static analysis, `structmeta` and `darling` do not parse attributes 7 | /// in a cfg_attr, so we have this struct to enable that 8 | pub struct CfgAttr { 9 | // pub _condition: Meta, 10 | pub attrs: Punctuated, 11 | } 12 | 13 | impl Parse for CfgAttr { 14 | fn parse(input: ParseStream) -> syn::Result { 15 | let _condition: Meta = input.parse()?; 16 | let _: Comma = input.parse()?; 17 | let attrs = input.parse_terminated(Meta::parse, Token![,])?; 18 | Ok(CfgAttr { attrs }) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /attr/src/derive.rs: -------------------------------------------------------------------------------- 1 | use syn::parse::{Parse, ParseStream}; 2 | use syn::punctuated::Punctuated; 3 | use syn::{Meta, Path, Token}; 4 | 5 | use crate::cfg_attr::CfgAttr; 6 | 7 | #[derive(Debug)] 8 | pub struct DeriveTrait { 9 | /// The derived trait 10 | pub name: String, 11 | /// The path to the derived trait 12 | pub path: Option, 13 | } 14 | 15 | impl DeriveTrait { 16 | pub fn has_derive(&self, pkg: &str, name: &str) -> bool { 17 | if self.name != name { 18 | return false; 19 | } 20 | match &self.path { 21 | None => true, 22 | Some(path) => path == pkg, 23 | } 24 | } 25 | 26 | pub fn has_any_derive(&self, pkg: &[&str], name: &str) -> bool { 27 | if self.name != name { 28 | return false; 29 | } 30 | match &self.path { 31 | None => true, 32 | Some(path) => pkg.contains(&path.as_str()), 33 | } 34 | } 35 | } 36 | 37 | impl From for DeriveTrait { 38 | fn from(value: Path) -> Self { 39 | let name = value.segments.last().as_ref().unwrap().ident.to_string(); 40 | let mut path = None; 41 | if value.segments.len() > 1 { 42 | path = value.segments.first().map(|s| s.ident.to_string()); 43 | } 44 | DeriveTrait { name, path } 45 | } 46 | } 47 | 48 | /// Existing libraries like `structmeta` and `darling` do not parse derives, as they are 49 | /// built assuming the data comes from proc-macro, whereas in ormlite we do both proc-macro 50 | /// as well as static codebase analysis 51 | #[derive(Debug, Default)] 52 | pub struct DeriveParser { 53 
| derives: Vec, 54 | } 55 | 56 | impl DeriveParser { 57 | pub fn has_derive(&self, pkg: &str, name: &str) -> bool { 58 | self.derives.iter().any(|d| d.has_derive(pkg, name)) 59 | } 60 | 61 | pub fn has_any_derive(&self, pkg: &[&str], name: &str) -> bool { 62 | self.derives.iter().any(|d| d.has_any_derive(pkg, name)) 63 | } 64 | 65 | pub(crate) fn update(&mut self, other: Derive) { 66 | for path in other.inner { 67 | self.derives.push(path.into()); 68 | } 69 | } 70 | } 71 | 72 | impl DeriveParser { 73 | const ATTRIBUTE: &'static str = "derive"; 74 | 75 | pub fn from_attributes(attrs: &[syn::Attribute]) -> Self { 76 | let mut result = Self::default(); 77 | for attr in attrs { 78 | let Some(ident) = attr.path().get_ident() else { 79 | continue; 80 | }; 81 | if ident == Self::ATTRIBUTE { 82 | result.update(attr.parse_args().unwrap()); 83 | } else if ident == "cfg_attr" { 84 | let cfg: CfgAttr = attr.parse_args().unwrap(); 85 | for attr in cfg.attrs { 86 | let Some(ident) = attr.path().get_ident() else { 87 | continue; 88 | }; 89 | if ident == Self::ATTRIBUTE { 90 | let Meta::List(attrs) = attr else { 91 | panic!("Expected a list of attributes") 92 | }; 93 | result.update(attrs.parse_args().unwrap()); 94 | } 95 | } 96 | } 97 | } 98 | result 99 | } 100 | } 101 | 102 | /// Parses `#[derive(...)]` 103 | pub(crate) struct Derive { 104 | inner: Punctuated, 105 | } 106 | 107 | impl Parse for Derive { 108 | fn parse(input: ParseStream) -> syn::Result { 109 | Ok(Derive { 110 | inner: input.parse_terminated(Path::parse_mod_style, Token![,])?, 111 | }) 112 | } 113 | } 114 | 115 | #[cfg(test)] 116 | mod tests { 117 | use super::*; 118 | use crate::repr::Repr; 119 | 120 | #[test] 121 | fn test_repr() { 122 | let q = quote::quote! { 123 | #[derive(sqlx::Type)] 124 | #[repr(u8)] 125 | pub enum Foo { 126 | Bar, 127 | Baz, 128 | } 129 | }; 130 | let item = syn::parse2::(q).unwrap(); 131 | let derive = DeriveParser::from_attributes(&item.attrs); 132 | let repr = Repr::from_attributes(&item.attrs).unwrap(); 133 | assert!(derive.has_any_derive(&["ormlite", "sqlx"], "Type")); 134 | assert_eq!(repr, "u8"); 135 | } 136 | 137 | /// The attributes on this are sort of nonsense, but we want to test the dynamic attribute parsing 138 | /// in ormlite_attr::Attribute 139 | #[test] 140 | fn test_attributes() { 141 | // the doc string is the regression test 142 | let code = r#"/// Json-serializable representation of query results 143 | #[derive(Debug, Serialize, Deserialize, Clone, sqlx::Type, ormlite::Model)] 144 | #[repr(u8)] 145 | #[ormlite(table = "result")] 146 | #[deprecated] 147 | pub struct QuerySet { 148 | pub headers: Vec, 149 | pub rows: Vec>, 150 | }"#; 151 | let file: syn::File = syn::parse_str(code).unwrap(); 152 | let syn::Item::Struct(item) = file.items.first().unwrap() else { 153 | panic!("expected struct"); 154 | }; 155 | let attr = DeriveParser::from_attributes(&item.attrs); 156 | let repr = Repr::from_attributes(&item.attrs).unwrap(); 157 | assert_eq!(repr, "u8"); 158 | assert!(attr.has_derive("ormlite", "Model")); 159 | assert!(attr.has_any_derive(&["ormlite", "sqlx"], "Type")); 160 | assert!(!attr.has_derive("ormlite", "ManualType")); 161 | } 162 | 163 | #[test] 164 | fn test_cfg_attr() { 165 | // the doc string is the regression test 166 | let code = r#" 167 | #[derive(Debug, Serialize, Deserialize, Clone, Copy)] 168 | #[cfg_attr( 169 | target_arch = "wasm32", 170 | derive(tsify::Tsify), 171 | tsify(into_wasm_abi, from_wasm_abi) 172 | )] 173 | #[cfg_attr( 174 | not(target_arch = "wasm32"), 175 | 
derive( 176 | sqlx::Type, 177 | strum::IntoStaticStr, 178 | strum::EnumString, 179 | ), 180 | strum(serialize_all = "snake_case") 181 | )] 182 | #[serde(rename_all = "snake_case")] 183 | pub enum Privacy { 184 | Private, 185 | Team, 186 | Public, 187 | } 188 | "#; 189 | let file: syn::File = syn::parse_str(code).unwrap(); 190 | let syn::Item::Enum(item) = file.items.first().unwrap() else { 191 | panic!() 192 | }; 193 | let attr = DeriveParser::from_attributes(&item.attrs); 194 | assert!(attr.has_any_derive(&["ormlite", "sqlx"], "Type")); 195 | } 196 | 197 | #[test] 198 | fn test_cfg_attr2() { 199 | let code = r#" 200 | #[derive(Debug, Serialize, Deserialize, Clone, Copy)] 201 | #[cfg_attr( 202 | target_arch = "wasm32", 203 | derive(tsify::Tsify), 204 | tsify(into_wasm_abi, from_wasm_abi) 205 | )] 206 | #[cfg_attr( 207 | not(target_arch = "wasm32"), 208 | derive(ormlite::types::ManualType, strum::IntoStaticStr, strum::EnumString), 209 | strum(serialize_all = "snake_case") 210 | )] 211 | #[serde(rename_all = "snake_case")] 212 | pub enum Privacy { 213 | Private, 214 | Team, 215 | Public, 216 | } 217 | "#; 218 | let file: syn::File = syn::parse_str(code).unwrap(); 219 | let syn::Item::Enum(item) = file.items.first().unwrap() else { 220 | panic!() 221 | }; 222 | let attr = DeriveParser::from_attributes(&item.attrs); 223 | assert_eq!(attr.has_derive("ormlite", "ManualType"), true); 224 | } 225 | } 226 | -------------------------------------------------------------------------------- /attr/src/error.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | use std::fmt::Display; 3 | 4 | #[derive(Debug)] 5 | pub struct SyndecodeError(pub String); 6 | 7 | impl Display for SyndecodeError { 8 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 9 | write!(f, "{}", self.0) 10 | } 11 | } 12 | 13 | impl Error for SyndecodeError {} 14 | -------------------------------------------------------------------------------- /attr/src/ext.rs: -------------------------------------------------------------------------------- 1 | use syn::{Data, DataStruct, DeriveInput, Field, Fields, FieldsNamed}; 2 | 3 | pub trait DeriveInputExt { 4 | fn fields(&self) -> syn::punctuated::Iter; 5 | } 6 | 7 | impl DeriveInputExt for DeriveInput { 8 | fn fields(&self) -> syn::punctuated::Iter { 9 | let fields = match &self.data { 10 | Data::Struct(DataStruct { ref fields, .. }) => fields, 11 | _ => panic!("#[ormlite] can only be used on structs"), 12 | }; 13 | let fields = match fields { 14 | Fields::Named(FieldsNamed { named, .. }) => named, 15 | _ => panic!("#[ormlite] can only be used on structs with named fields"), 16 | }; 17 | fields.iter() 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /attr/src/ident.rs: -------------------------------------------------------------------------------- 1 | //! This ident needs to exist because the proc_macro2 idents are not Send. 
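//!
//! A minimal sketch of the intended round-trip (illustrative; the field name below is
//! hypothetical, not taken from this crate):
//!
//! ```ignore
//! use ormlite_attr::Ident;
//! use quote::quote;
//!
//! // Keep the identifier as a plain String (Send-friendly), then turn it back into
//! // tokens when generating code.
//! let ident = Ident::from("user_id");
//! assert_eq!(ident, "user_id");                // via PartialEq<T: AsRef<str>>
//! let field = quote! { pub #ident: i64 };      // ToTokens re-creates a proc_macro2::Ident
//! assert_eq!(field.to_string(), "pub user_id : i64");
//! ```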
2 | use proc_macro2::TokenStream; 3 | use quote::TokenStreamExt; 4 | 5 | #[derive(Clone, Debug, Hash, PartialEq, Eq)] 6 | pub struct Ident(String); 7 | 8 | impl Ident { 9 | pub fn as_ref(&self) -> &String { 10 | &self.0 11 | } 12 | } 13 | 14 | impl From<&proc_macro2::Ident> for Ident { 15 | fn from(ident: &proc_macro2::Ident) -> Self { 16 | Ident(ident.to_string()) 17 | } 18 | } 19 | 20 | impl From<&str> for Ident { 21 | fn from(ident: &str) -> Self { 22 | Ident(ident.to_string()) 23 | } 24 | } 25 | 26 | impl From for Ident { 27 | fn from(ident: String) -> Self { 28 | Ident(ident) 29 | } 30 | } 31 | 32 | impl From<&String> for Ident { 33 | fn from(ident: &String) -> Self { 34 | Ident(ident.clone()) 35 | } 36 | } 37 | 38 | impl quote::ToTokens for Ident { 39 | fn to_tokens(&self, tokens: &mut TokenStream) { 40 | tokens.append(proc_macro2::Ident::new(&self.0, proc_macro2::Span::call_site())) 41 | } 42 | } 43 | 44 | impl std::fmt::Display for Ident { 45 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 46 | write!(f, "{}", self.0) 47 | } 48 | } 49 | 50 | impl PartialEq for Ident 51 | where 52 | T: AsRef, 53 | { 54 | fn eq(&self, t: &T) -> bool { 55 | let t = t.as_ref(); 56 | self.0.as_str() == t 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /attr/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(non_snake_case)] 2 | 3 | use std::collections::HashMap; 4 | use std::path::{Path, PathBuf}; 5 | use std::{env, fs}; 6 | 7 | use anyhow::Context; 8 | use ignore::Walk; 9 | use syn::{DeriveInput, Item}; 10 | 11 | use crate::derive::DeriveParser; 12 | use crate::repr::Repr; 13 | pub use error::*; 14 | pub use ext::*; 15 | pub use ident::*; 16 | pub use metadata::*; 17 | pub use ttype::*; 18 | 19 | mod cfg_attr; 20 | mod derive; 21 | mod error; 22 | mod ext; 23 | mod ident; 24 | mod metadata; 25 | mod repr; 26 | pub mod ttype; 27 | 28 | #[derive(Default, Debug)] 29 | pub struct LoadOptions { 30 | pub verbose: bool, 31 | } 32 | 33 | /// This is an intermediate representation of the schema. 34 | /// 35 | pub struct OrmliteSchema { 36 | pub tables: Vec, 37 | // map of rust structs (e.g. enums) to database encodings. 38 | // note that these are not bona fide postgres types. 
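// For example (values illustrative), after scanning a codebase this might hold
// {"Privacy": "String", "Role": "u8"}: a #[repr(u8)] enum is recorded as "u8", and a
// type with no repr attribute falls back to "String" (see schema_from_filepaths below).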
39 | pub type_reprs: HashMap, 40 | } 41 | 42 | struct Intermediate { 43 | model_structs: Vec, 44 | type_structs: Vec<(syn::ItemStruct, Option)>, 45 | type_enums: Vec<(syn::ItemEnum, Option)>, 46 | } 47 | 48 | impl Intermediate { 49 | fn into_models_and_types( 50 | self, 51 | ) -> ( 52 | impl Iterator, 53 | impl Iterator)>, 54 | ) { 55 | let models = self.model_structs.into_iter(); 56 | let types = self 57 | .type_structs 58 | .into_iter() 59 | .map(|(s, a)| (s.ident.to_string(), a)) 60 | .chain(self.type_enums.into_iter().map(|(e, a)| (e.ident.to_string(), a))); 61 | (models, types) 62 | } 63 | 64 | fn from_file(value: syn::File) -> Self { 65 | let mut model_structs = Vec::new(); 66 | let mut type_structs = Vec::new(); 67 | let mut type_enums = Vec::new(); 68 | for item in value.items { 69 | match item { 70 | Item::Struct(s) => { 71 | let attrs = DeriveParser::from_attributes(&s.attrs); 72 | if attrs.has_derive("ormlite", "Model") { 73 | tracing::debug!(model=%s.ident.to_string(), "Found"); 74 | model_structs.push(s); 75 | } else if attrs.has_any_derive(&["ormlite", "sqlx"], "Type") { 76 | tracing::debug!(r#type=%s.ident.to_string(), "Found"); 77 | let repr = Repr::from_attributes(&s.attrs); 78 | type_structs.push((s, repr)); 79 | } else if attrs.has_derive("ormlite", "ManualType") { 80 | tracing::debug!(r#type=%s.ident.to_string(), "Found"); 81 | let repr = Repr::from_attributes(&s.attrs); 82 | type_structs.push((s, repr)); 83 | } 84 | } 85 | Item::Enum(e) => { 86 | let attrs = DeriveParser::from_attributes(&e.attrs); 87 | if attrs.has_derive("ormlite", "Type") || attrs.has_derive("ormlite", "ManualType") { 88 | tracing::debug!(r#type=%e.ident.to_string(), "Found"); 89 | let repr = Repr::from_attributes(&e.attrs); 90 | type_enums.push((e, repr)); 91 | } 92 | } 93 | _ => {} 94 | } 95 | } 96 | Self { 97 | model_structs, 98 | type_structs, 99 | type_enums, 100 | } 101 | } 102 | } 103 | 104 | pub fn schema_from_filepaths(paths: &[&Path]) -> anyhow::Result { 105 | let cwd = env::var("CARGO_RUSTC_CURRENT_DIR") 106 | .or_else(|_| env::var("CARGO_MANIFEST_DIR")) 107 | .map(PathBuf::from) 108 | .or_else(|_| env::current_dir()) 109 | .expect("Failed to get current directory for schema"); 110 | 111 | let paths = paths.iter().map(|p| cwd.join(p)).collect::>(); 112 | let invalid_paths = paths.iter().filter(|p| fs::metadata(p).is_err()).collect::>(); 113 | if !invalid_paths.is_empty() { 114 | for path in &invalid_paths { 115 | tracing::error!(path = path.display().to_string(), "Does not exist"); 116 | } 117 | let paths = invalid_paths 118 | .iter() 119 | .map(|p| p.display().to_string()) 120 | .collect::>() 121 | .join(", "); 122 | anyhow::bail!("Provided paths that did not exist: {}", paths); 123 | } 124 | 125 | let walk = paths.iter().flat_map(Walk::new); 126 | 127 | let walk = walk 128 | .map(|e| e.unwrap()) 129 | .filter(|e| e.path().extension().map(|e| e == "rs").unwrap_or(false)) 130 | .map(|e| e.into_path()) 131 | .chain(paths.iter().filter(|p| p.ends_with(".rs")).map(|p| p.to_path_buf())); 132 | 133 | let mut tables = vec![]; 134 | let mut type_aliases = HashMap::new(); 135 | for entry in walk { 136 | let contents = fs::read_to_string(&entry).context(format!("failed to read file: {}", entry.display()))?; 137 | tracing::debug!( 138 | file = entry.display().to_string(), 139 | "Checking for Model, Type, ManualType derive attrs" 140 | ); 141 | if !(contents.contains("Model") || contents.contains("Type") || contents.contains("ManualType")) { 142 | continue; 143 | } 144 | let ast = 
syn::parse_file(&contents).context(format!("Failed to parse file: {}", entry.display()))?; 145 | let intermediate = Intermediate::from_file(ast); 146 | let (models, types) = intermediate.into_models_and_types(); 147 | 148 | for item in models { 149 | let derive: DeriveInput = item.into(); 150 | tables.push(ModelMeta::from_derive(&derive)); 151 | } 152 | 153 | for (name, repr) in types { 154 | let ty = repr.map(|s| s.to_string()).unwrap_or_else(|| "String".to_string()); 155 | type_aliases.insert(name, ty); 156 | } 157 | } 158 | Ok(OrmliteSchema { 159 | tables, 160 | type_reprs: type_aliases, 161 | }) 162 | } 163 | -------------------------------------------------------------------------------- /attr/src/metadata/column.rs: -------------------------------------------------------------------------------- 1 | use crate::{Ident, Type}; 2 | use proc_macro2::TokenStream; 3 | use structmeta::{Flag, StructMeta}; 4 | use syn::{Attribute, Field, LitStr, Path}; 5 | 6 | #[derive(Debug, Clone)] 7 | pub enum Join { 8 | ManyToOne { 9 | /// Name of local column on the table that maps to the fk on the other table 10 | column: String, 11 | }, 12 | ManyToMany { 13 | table: String, 14 | }, 15 | OneToMany { 16 | model: String, 17 | field: String, 18 | }, 19 | } 20 | 21 | /// All the metadata we can capture about a column 22 | #[derive(Clone, Debug)] 23 | pub struct ColumnMeta { 24 | /// Name of the column in the database 25 | pub name: String, 26 | pub ty: Type, 27 | /// Only says whether the primary key is marked (with an attribute). Use table_metadata.primary_key to definitively know the primary key. 28 | pub marked_primary_key: bool, 29 | pub has_database_default: bool, 30 | /// Identifier used in Rust to refer to the column 31 | pub ident: Ident, 32 | 33 | pub skip: bool, 34 | pub rust_default: Option, 35 | pub join: Option, 36 | pub json: bool, 37 | } 38 | 39 | impl ColumnMeta { 40 | pub fn is_default(&self) -> bool { 41 | self.rust_default.is_some() || self.has_database_default 42 | } 43 | 44 | pub fn from_fields<'a>(fields: impl Iterator) -> Vec { 45 | fields.map(|f| ColumnMeta::from_field(f)).collect() 46 | } 47 | 48 | pub fn from_syn(ident: &syn::Ident, ty: &syn::Type) -> Self { 49 | let syn::Type::Path(ty) = &ty else { 50 | panic!("No type on field {}", ident); 51 | }; 52 | Self { 53 | name: ident.to_string(), 54 | ty: Type::from(&ty.path), 55 | marked_primary_key: false, 56 | has_database_default: false, 57 | ident: Ident::from(ident), 58 | skip: false, 59 | rust_default: None, 60 | join: None, 61 | json: false, 62 | } 63 | } 64 | 65 | pub fn is_join(&self) -> bool { 66 | matches!(self.ty, Type::Join(_)) 67 | } 68 | 69 | pub fn is_join_one(&self) -> bool { 70 | let Some(join) = &self.join else { 71 | return false; 72 | }; 73 | matches!(join, Join::ManyToOne { .. }) 74 | } 75 | 76 | pub fn is_join_many(&self) -> bool { 77 | let Some(join) = &self.join else { 78 | return false; 79 | }; 80 | matches!(join, Join::ManyToOne { .. } | Join::ManyToMany { .. }) 81 | } 82 | 83 | pub fn is_option(&self) -> bool { 84 | matches!(self.ty, Type::Option(_)) 85 | } 86 | 87 | pub fn is_json(&self) -> bool { 88 | self.ty.is_json() || self.json 89 | } 90 | 91 | /// We expect this to only return a `Model` of some kind. 
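/// For example (illustrative), a field typed `Join<Organization>` yields `Some("Organization")`,
/// while a plain column such as `i32` yields `None`.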
92 | pub fn joined_struct_name(&self) -> Option { 93 | let Type::Join(join) = &self.ty else { 94 | return None; 95 | }; 96 | Some(join.inner_type_name()) 97 | } 98 | 99 | pub fn joined_model(&self) -> TokenStream { 100 | self.ty.qualified_inner_name() 101 | } 102 | 103 | pub fn from_field(f: &Field) -> Self { 104 | let ident = f.ident.as_ref().expect("No ident on field"); 105 | let attrs = ColumnAttr::from_attrs(&f.attrs); 106 | let mut column = ColumnMeta::from_syn(ident, &f.ty); 107 | for attr in attrs { 108 | if attr.primary_key.value() { 109 | column.marked_primary_key = true; 110 | column.has_database_default = true; 111 | } 112 | if let Some(c) = attr.column { 113 | column.name = c.value(); 114 | if column.ty.is_join() { 115 | column.join = Some(Join::ManyToOne { column: c.value() }); 116 | } 117 | } 118 | if let Some(table_name) = attr.join_table { 119 | column.join = Some(Join::ManyToMany { 120 | table: table_name.value(), 121 | }); 122 | } 123 | if let Some(path) = attr.foreign_field { 124 | let mut segments = path.segments.iter(); 125 | let model = segments 126 | .next() 127 | .expect("no model on foreign field attribute") 128 | .ident 129 | .to_string(); 130 | let field = segments 131 | .next() 132 | .expect("no field on foreign field attribute") 133 | .ident 134 | .to_string(); 135 | column.join = Some(Join::OneToMany { model, field }); 136 | } 137 | if let Some(default_value) = attr.default_value { 138 | column.rust_default = Some(default_value.value()); 139 | } 140 | column.has_database_default |= attr.default.value(); 141 | column.marked_primary_key |= attr.insertable_primary_key.value(); 142 | column.skip |= attr.skip.value(); 143 | column.json |= attr.json.value(); 144 | } 145 | if column.ty.is_join() ^ column.join.is_some() { 146 | panic!("Column {ident} is a Join. You must specify one of these attributes: column (many to one), join_table (many to many), or foreign_field (one to many)"); 147 | } 148 | column 149 | } 150 | 151 | #[doc(hidden)] 152 | pub fn mock(name: &str, ty: &str) -> Self { 153 | Self { 154 | name: name.to_string(), 155 | ty: Type::Inner(crate::InnerType::mock(ty)), 156 | marked_primary_key: false, 157 | has_database_default: false, 158 | ident: Ident::from(name), 159 | skip: false, 160 | rust_default: None, 161 | join: None, 162 | json: false, 163 | } 164 | } 165 | 166 | #[doc(hidden)] 167 | pub fn mock_join(name: &str, join_model: &str) -> Self { 168 | Self { 169 | name: name.to_string(), 170 | ty: Type::Join(Box::new(Type::Inner(crate::InnerType::mock(join_model)))), 171 | marked_primary_key: false, 172 | has_database_default: false, 173 | ident: Ident::from(name), 174 | skip: false, 175 | rust_default: None, 176 | join: None, 177 | json: false, 178 | } 179 | } 180 | } 181 | 182 | #[derive(Clone, Debug)] 183 | pub struct ForeignKey { 184 | pub model: String, 185 | pub column: String, 186 | } 187 | 188 | /// Available attributes on a column (struct field) 189 | #[derive(StructMeta)] 190 | pub struct ColumnAttr { 191 | pub primary_key: Flag, 192 | /// Marks a primary key, but includes it in the Insert struct. 193 | pub insertable_primary_key: Flag, 194 | /// Specifies that a default exists at the database level. 195 | pub default: Flag, 196 | /// Specify a default value on the Rust side. 
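/// The value is a string containing a Rust expression. Example (field name illustrative;
/// the expression mirrors the test at the bottom of this file):
/// pub struct User {
///     #[ormlite(default_value = "serde_json::Value::Null")]
///     pub metadata: serde_json::Value,
/// }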
197 | pub default_value: Option, 198 | 199 | /// Example: 200 | /// pub struct User { 201 | /// pub org_id: i32, 202 | /// #[ormlite(join_table = "user_role")] 203 | /// pub roles: Join>, 204 | /// } 205 | pub join_table: Option, 206 | 207 | /// Example: 208 | /// pub struct User { 209 | /// pub id: i32, 210 | /// #[ormlite(foreign_field = Post::author_id)] 211 | /// pub posts: Join>, 212 | /// } 213 | /// 214 | /// pub struct Post { 215 | /// pub id: i32, 216 | /// pub author_id: i32, 217 | /// } 218 | pub foreign_field: Option, 219 | 220 | /// The name of the column in the database. Defaults to the field name. 221 | /// 222 | /// Required for many to one joins. 223 | /// Example: 224 | /// pub struct User { 225 | /// #[ormlite(column = "organization_id")] 226 | /// pub organization: Join, 227 | /// } 228 | pub column: Option, 229 | 230 | /// Skip serializing this field to/from the database. Note the field must implement `Default`. 231 | pub skip: Flag, 232 | 233 | pub json: Flag, 234 | } 235 | 236 | impl ColumnAttr { 237 | pub fn from_attrs(ast: &[Attribute]) -> Vec { 238 | ast.iter() 239 | .filter(|a| a.path().is_ident("ormlite")) 240 | .map(|a| a.parse_args().unwrap()) 241 | .collect() 242 | } 243 | } 244 | 245 | #[cfg(test)] 246 | mod tests { 247 | use super::*; 248 | use syn::{parse_quote, Attribute, Fields, ItemStruct}; 249 | 250 | #[test] 251 | fn test_from_field() { 252 | let item: ItemStruct = syn::parse_str( 253 | r#" 254 | struct Foo { 255 | #[ormlite(default_value = "\"foo\".to_string()")] 256 | pub name: String 257 | } 258 | "#, 259 | ) 260 | .unwrap(); 261 | let Fields::Named(fields) = item.fields else { 262 | panic!(); 263 | }; 264 | let field = fields.named.first().unwrap(); 265 | let column = ColumnMeta::from_field(field); 266 | assert_eq!(column.name, "name"); 267 | assert_eq!(column.ty, "String"); 268 | assert_eq!(column.marked_primary_key, false); 269 | assert_eq!(column.has_database_default, false); 270 | assert_eq!(column.rust_default, Some("\"foo\".to_string()".to_string())); 271 | assert_eq!(column.ident, "name"); 272 | } 273 | 274 | #[test] 275 | fn test_default() { 276 | let attr: Attribute = parse_quote!(#[ormlite(default_value = "serde_json::Value::Null")]); 277 | let args: ColumnAttr = attr.parse_args().unwrap(); 278 | assert!(args.default_value.is_some()); 279 | 280 | let attr: Attribute = parse_quote!(#[ormlite(default)]); 281 | let args: ColumnAttr = attr.parse_args().unwrap(); 282 | assert!(args.default.value()); 283 | } 284 | 285 | #[test] 286 | fn test_column() { 287 | let attr: Attribute = parse_quote!(#[ormlite(column = "org_id")]); 288 | let args: ColumnAttr = attr.parse_args().unwrap(); 289 | assert!(args.column.is_some()); 290 | } 291 | } 292 | -------------------------------------------------------------------------------- /attr/src/metadata/insert.rs: -------------------------------------------------------------------------------- 1 | use crate::Ident; 2 | use crate::{TableAttr, TableMeta}; 3 | use syn::DeriveInput; 4 | 5 | /// Metadata used for IntoArguments, TableMeta, and (subset of) Model 6 | /// This structs are constructed from the *Attribute structs in crate::attr. 
7 | #[derive(Debug, Clone)] 8 | pub struct InsertMeta { 9 | pub table: TableMeta, 10 | pub returns: Ident, 11 | /// Only gets set if the table attribute was set 12 | pub name: Option, 13 | } 14 | 15 | impl InsertMeta { 16 | pub fn from_derive(ast: &DeriveInput) -> Self { 17 | let attrs = TableAttr::from_attrs(&ast.attrs); 18 | let table = TableMeta::new(ast, &attrs); 19 | let mut returns = None; 20 | let mut name = None; 21 | for attr in attrs { 22 | if let Some(v) = attr.returns { 23 | returns = Some(v.value()); 24 | } 25 | if let Some(v) = attr.table { 26 | name = Some(v.value()); 27 | } 28 | } 29 | let returns = 30 | returns.expect("You must specify #[ormlite(returns = \"...\")] for structs marked with #[derive(Insert)]"); 31 | let returns = Ident::from(returns); 32 | Self { table, returns, name } 33 | } 34 | } 35 | 36 | impl std::ops::Deref for InsertMeta { 37 | type Target = TableMeta; 38 | 39 | fn deref(&self) -> &Self::Target { 40 | &self.table 41 | } 42 | } 43 | 44 | #[cfg(test)] 45 | mod tests { 46 | use syn::{parse_str, ItemStruct}; 47 | 48 | use super::*; 49 | 50 | #[test] 51 | fn test_name() { 52 | let s = r#"#[derive(Insert)] 53 | #[ormlite(returns = "User")] 54 | pub struct InsertUser2 { 55 | name: String, 56 | number: i32, 57 | ty: i32, 58 | org_id: i32, 59 | }"#; 60 | let s: ItemStruct = parse_str(s).unwrap(); 61 | let s = DeriveInput::from(s); 62 | let meta = InsertMeta::from_derive(&s); 63 | assert_eq!(meta.returns, "User"); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /attr/src/metadata/mod.rs: -------------------------------------------------------------------------------- 1 | pub use column::*; 2 | pub use model::*; 3 | pub use table::*; 4 | pub use insert::*; 5 | 6 | mod column; 7 | mod model; 8 | mod table; 9 | mod insert; 10 | -------------------------------------------------------------------------------- /attr/src/metadata/model.rs: -------------------------------------------------------------------------------- 1 | use crate::metadata::column::ColumnMeta; 2 | use crate::metadata::table::TableMeta; 3 | use crate::Ident; 4 | use crate::TableAttr; 5 | use syn::DeriveInput; 6 | 7 | /// Metadata used for IntoArguments, TableMeta, and (subset of) Model 8 | #[derive(Debug, Clone)] 9 | pub struct ModelMeta { 10 | pub table: TableMeta, 11 | pub insert_struct: Option, 12 | pub extra_derives: Option>, 13 | pub pkey: ColumnMeta, 14 | } 15 | 16 | impl ModelMeta { 17 | pub fn builder_struct(&self) -> Ident { 18 | Ident::from(format!("{}Builder", self.ident.as_ref())) 19 | } 20 | 21 | pub fn database_columns_except_pkey(&self) -> impl Iterator + '_ { 22 | self.columns 23 | .iter() 24 | .filter(|&c| !c.skip) 25 | .filter(|&c| self.pkey.name != c.name) 26 | } 27 | 28 | pub fn from_derive(ast: &DeriveInput) -> Self { 29 | let attrs = TableAttr::from_attrs(&ast.attrs); 30 | let table = TableMeta::new(ast, &attrs); 31 | let pkey = table.pkey.as_deref().expect(&format!( 32 | "No column marked with #[ormlite(primary_key)], and no column named id, uuid, {0}_id, or {0}_uuid", 33 | table.name, 34 | )); 35 | let mut insert_struct = None; 36 | let mut extra_derives: Option> = None; 37 | for attr in attrs { 38 | if let Some(v) = attr.insert { 39 | insert_struct = Some(v.value()); 40 | } 41 | if let Some(v) = attr.insertable { 42 | insert_struct = Some(v.to_string()); 43 | } 44 | if let Some(v) = attr.extra_derives { 45 | if !v.is_empty() { 46 | extra_derives = Some(v); 47 | } 48 | } 49 | } 50 | let pkey = table.columns.iter().find(|&c| 
c.name == pkey).unwrap().clone();
 51 |         let insert_struct = insert_struct.map(|v| Ident::from(v));
 52 |         let extra_derives = extra_derives.take().map(|vec| vec.into_iter().map(|v| v.to_string()).map(Ident::from).collect());
 53 | 
 54 |         Self {
 55 |             table,
 56 |             insert_struct,
 57 |             extra_derives,
 58 |             pkey,
 59 |         }
 60 |     }
 61 | 
 62 |     #[doc(hidden)]
 63 |     pub fn mock(name: &str, columns: Vec<ColumnMeta>) -> Self {
 64 |         let inner = TableMeta::mock(name, columns);
 65 |         Self {
 66 |             pkey: inner.columns.iter().find(|c| c.name == "id").unwrap().clone(),
 67 |             table: inner,
 68 |             extra_derives: None,
 69 |             insert_struct: None,
 70 |         }
 71 |     }
 72 | }
 73 | 
 74 | impl std::ops::Deref for ModelMeta {
 75 |     type Target = TableMeta;
 76 | 
 77 |     fn deref(&self) -> &Self::Target {
 78 |         &self.table
 79 |     }
 80 | }
 81 | 
 82 | #[cfg(test)]
 83 | mod tests {
 84 |     use super::*;
 85 |     use syn::ItemStruct;
 86 | 
 87 |     #[test]
 88 |     fn test_decode_metadata() {
 89 |         let ast = syn::parse_str::<ItemStruct>(
 90 |             r#"struct User {
 91 |             #[ormlite(column = "Id")]
 92 |             id: i32,
 93 |         }"#,
 94 |         )
 95 |         .unwrap();
 96 |         let input = DeriveInput::from(ast);
 97 |         let meta = ModelMeta::from_derive(&input);
 98 |         assert_eq!(meta.pkey.name, "Id");
 99 |     }
100 | }
101 | 
-------------------------------------------------------------------------------- /attr/src/metadata/table.rs:
--------------------------------------------------------------------------------
  1 | use crate::metadata::column::ColumnMeta;
  2 | use crate::DeriveInputExt;
  3 | use crate::Ident;
  4 | use convert_case::{Case, Casing};
  5 | use structmeta::StructMeta;
  6 | use syn::{Attribute, DeriveInput, LitStr};
  7 | 
  8 | /// Metadata used for IntoArguments, TableMeta, and (subset of) Model
  9 | /// These structs are constructed from the *Attribute structs in crate::attr.
 10 | #[derive(Debug, Clone)]
 11 | pub struct TableMeta {
 12 |     pub name: String,
 13 |     pub ident: Ident,
 14 |     pub columns: Vec<ColumnMeta>,
 15 |     pub databases: Vec<String>,
 16 | 
 17 |     /// If you're using this, consider whether you should instead use ModelMeta and its pkey,
 18 |     /// which is not optional.
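    /// When no field is marked with #[ormlite(primary_key)], `TableMeta::new` below falls back to
    /// the conventional names from `sqlmo::util::pkey_column_names` — e.g. `id`, `uuid`,
    /// `<table>_id`, or `<table>_uuid` — and treats that column as having a database default.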
19 | pub pkey: Option, 20 | } 21 | 22 | impl TableMeta { 23 | pub fn new(ast: &DeriveInput, attrs: &[TableAttr]) -> Self { 24 | let ident = &ast.ident; 25 | let name = if let Some(value) = attrs.iter().find_map(|a| a.table.as_ref()) { 26 | value.value() 27 | } else { 28 | ident.to_string().to_case(Case::Snake) 29 | }; 30 | let mut columns = ColumnMeta::from_fields(ast.fields()); 31 | let mut pkey = columns 32 | .iter() 33 | .find(|&c| c.marked_primary_key) 34 | .map(|c| c.clone()) 35 | .map(|c| c.name.clone()); 36 | if pkey.is_none() { 37 | let candidates = sqlmo::util::pkey_column_names(&name); 38 | if let Some(c) = columns.iter_mut().find(|c| candidates.iter().any(|n| c.ident == n)) { 39 | c.has_database_default = true; 40 | pkey = Some(c.name.clone()); 41 | } 42 | } 43 | let databases = attrs.iter().flat_map(|d| &d.database).map(|d| d.value()).collect(); 44 | Self { 45 | name, 46 | ident: Ident::from(ident), 47 | columns, 48 | databases, 49 | pkey, 50 | } 51 | } 52 | 53 | pub fn from_derive(ast: &DeriveInput) -> Self { 54 | let attr = TableAttr::from_attrs(&ast.attrs); 55 | Self::new(ast, &attr) 56 | } 57 | 58 | pub fn all_fields(&self) -> impl Iterator + '_ { 59 | self.columns.iter().map(|c| &c.ident) 60 | } 61 | 62 | pub fn database_columns(&self) -> impl Iterator + '_ { 63 | self.columns 64 | .iter() 65 | .filter(|&c| !c.skip) 66 | .filter(|&c| !c.is_join() || c.is_join_one()) 67 | } 68 | 69 | pub fn many_to_one_joins(&self) -> impl Iterator + '_ { 70 | self.columns.iter().filter(|&c| c.is_join_one()) 71 | } 72 | 73 | #[allow(dead_code)] 74 | pub(crate) fn mock(name: &str, columns: Vec) -> Self { 75 | TableMeta { 76 | name: name.to_string(), 77 | ident: Ident::from(name.to_case(Case::Pascal)), 78 | pkey: None, 79 | columns, 80 | databases: vec![], 81 | } 82 | } 83 | } 84 | 85 | /// Available attributes on a struct 86 | #[derive(StructMeta)] 87 | pub struct TableAttr { 88 | /// The name of the table in the database. Defaults to the struct name. 89 | /// Example: 90 | /// #[ormlite(table = "users")] 91 | /// pub struct User { 92 | /// pub id: i32, 93 | /// } 94 | pub table: Option, 95 | 96 | /// Deprecated name for insert 97 | /// Used as `#[ormlite(insertable = InsertUser)]` 98 | pub insertable: Option, 99 | 100 | /// The struct name of an insertion struct. 101 | /// Example: 102 | /// #[ormlite(insert = "InsertUser")] 103 | /// pub struct User { 104 | /// pub id: i32, 105 | /// } 106 | /// 107 | pub insert: Option, 108 | 109 | /// Add extra derives to the insertion structs. 110 | /// Example: 111 | /// #[ormlite(insert = "InsertUser", extra_derives(Serialize, Deserialize))] 112 | /// pub struct User { 113 | /// pub id: i32, 114 | /// } 115 | /// 116 | pub extra_derives: Option>, 117 | 118 | /// Only used for derive(Insert) 119 | /// Example: 120 | /// #[ormlite(returns = "User")] 121 | /// pub struct InsertUser {} 122 | pub returns: Option, 123 | 124 | /// Set the target database. Only needed if you have multiple databases enabled. 125 | /// If you have a single database enabled, you don't need to set this. 126 | /// Even with multiple databases, you can skip this by setting a default database with the `default-` feature. 127 | /// 128 | /// Currently, because methods conflict, you 129 | /// You can use this attribute multiple times to set multiple databases. 
/// Example:
131 |     /// #[ormlite(database = "postgres")]
132 |     /// #[ormlite(database = "sqlite")]
133 |     /// pub struct User {
134 |     ///     pub id: i32,
135 |     /// }
136 |     /// This will generate orm code for `User` for both the `postgres` and `sqlite` databases.
137 |     pub database: Option<LitStr>,
138 | }
139 | 
140 | impl TableAttr {
141 |     pub fn from_attrs(attrs: &[Attribute]) -> Vec<Self> {
142 |         attrs
143 |             .iter()
144 |             .filter(|&a| a.path().is_ident("ormlite"))
145 |             .map(|a| a.parse_args().unwrap())
146 |             .collect()
147 |     }
148 | }
149 | 
-------------------------------------------------------------------------------- /attr/src/repr.rs:
--------------------------------------------------------------------------------
  1 | use quote::ToTokens;
  2 | use structmeta::StructMeta;
  3 | use syn::Path;
  4 | 
  5 | #[derive(StructMeta)]
  6 | pub struct Repr(#[struct_meta(unnamed)] Path);
  7 | 
  8 | impl std::fmt::Debug for Repr {
  9 |     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 10 |         self.0.to_token_stream().to_string().fmt(f)
 11 |     }
 12 | }
 13 | 
 14 | impl std::fmt::Display for Repr {
 15 |     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 16 |         self.0.to_token_stream().to_string().fmt(f)
 17 |     }
 18 | }
 19 | 
 20 | impl PartialEq<&str> for Repr {
 21 |     fn eq(&self, &other: &&str) -> bool {
 22 |         self.0.is_ident(other)
 23 |     }
 24 | }
 25 | 
 26 | impl Repr {
 27 |     const ATTRIBUTE: &'static str = "repr";
 28 | 
 29 |     pub fn from_attributes(attrs: &[syn::Attribute]) -> Option<Self> {
 30 |         for a in attrs {
 31 |             let Some(ident) = a.path().get_ident() else {
 32 |                 continue;
 33 |             };
 34 |             if ident == Self::ATTRIBUTE {
 35 |                 // semantically, the parse error and returning Some are different,
 36 |                 // so we're writing out Some() instead of using `.ok()`
 37 |                 return Some(a.parse_args().unwrap());
 38 |             }
 39 |         }
 40 |         None
 41 |     }
 42 | }
 43 | 
-------------------------------------------------------------------------------- /attr/src/ttype.rs:
--------------------------------------------------------------------------------
  1 | use crate::Ident;
  2 | use proc_macro2::TokenStream;
  3 | use quote::TokenStreamExt;
  4 | use syn::PathArguments;
  5 | 
  6 | #[derive(Clone, Debug, PartialEq, Hash)]
  7 | pub enum Type {
  8 |     Option(Box<Type>),
  9 |     Vec(Box<Type>),
 10 |     /// Database primitive, includes DateTime, Jsonb, etc.
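    /// For example (illustrative): `i32` and `rust_decimal::Decimal` both parse to `Inner`, while
    /// `Option<String>` becomes `Option(Inner("String"))` and `Join<Org>` becomes `Join(Inner("Org"))`
    /// via the `From<InnerType>` impl below.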
11 | Inner(InnerType), 12 | Join(Box), 13 | } 14 | 15 | impl Type { 16 | pub fn joined_type(&self) -> Option<&Type> { 17 | match &self { 18 | Type::Join(ty) => Some(ty.as_ref()), 19 | _ => None, 20 | } 21 | } 22 | 23 | pub fn is_string(&self) -> bool { 24 | match self { 25 | Type::Inner(ty) => ty.ident == "String", 26 | _ => false, 27 | } 28 | } 29 | 30 | pub fn is_json(&self) -> bool { 31 | match self { 32 | Type::Inner(ty) => ty.ident == "Json", 33 | Type::Option(ty) => ty.is_json(), 34 | _ => false, 35 | } 36 | } 37 | 38 | pub fn is_join(&self) -> bool { 39 | matches!(self, Type::Join(_)) 40 | } 41 | 42 | pub fn is_option(&self) -> bool { 43 | matches!(self, Type::Option(_)) 44 | } 45 | 46 | pub fn inner_type_name(&self) -> String { 47 | match self { 48 | Type::Inner(ty) => ty.ident.to_string(), 49 | Type::Option(ty) => ty.inner_type_name(), 50 | Type::Vec(ty) => ty.inner_type_name(), 51 | Type::Join(ty) => ty.inner_type_name(), 52 | } 53 | } 54 | 55 | pub fn inner_type_mut(&mut self) -> &mut InnerType { 56 | match self { 57 | Type::Inner(ty) => ty, 58 | Type::Option(ty) => ty.inner_type_mut(), 59 | Type::Vec(ty) => ty.inner_type_mut(), 60 | Type::Join(ty) => ty.inner_type_mut(), 61 | } 62 | } 63 | 64 | pub fn inner_type(&self) -> &InnerType { 65 | match self { 66 | Type::Inner(ty) => ty, 67 | Type::Option(ty) => ty.inner_type(), 68 | Type::Vec(ty) => ty.inner_type(), 69 | Type::Join(ty) => ty.inner_type(), 70 | } 71 | } 72 | 73 | pub fn qualified_inner_name(&self) -> TokenStream { 74 | match self { 75 | Type::Inner(ty) => { 76 | let segments = ty.path.iter(); 77 | let ident = &ty.ident; 78 | quote::quote! { 79 | #(#segments)::* #ident 80 | } 81 | } 82 | Type::Option(ty) => ty.qualified_inner_name(), 83 | Type::Vec(ty) => ty.qualified_inner_name(), 84 | Type::Join(ty) => ty.qualified_inner_name(), 85 | } 86 | } 87 | } 88 | 89 | impl From for Type { 90 | fn from(value: InnerType) -> Self { 91 | match value.ident.to_string().as_str() { 92 | "Option" => { 93 | let ty = value.args.unwrap(); 94 | Type::Option(Box::new(Type::from(*ty))) 95 | } 96 | "Vec" => { 97 | let ty = value.args.unwrap(); 98 | Type::Vec(Box::new(Type::from(*ty))) 99 | } 100 | "Join" => { 101 | let ty = value.args.unwrap(); 102 | Type::Join(Box::new(Type::from(*ty))) 103 | } 104 | _ => Type::Inner(value), 105 | } 106 | } 107 | } 108 | 109 | impl From<&syn::Path> for Type { 110 | fn from(path: &syn::Path) -> Self { 111 | let other = InnerType::from(path); 112 | Type::from(other) 113 | } 114 | } 115 | 116 | impl quote::ToTokens for Type { 117 | fn to_tokens(&self, tokens: &mut TokenStream) { 118 | match self { 119 | Type::Option(ty) => { 120 | tokens.append_all(quote::quote! { Option<#ty> }); 121 | } 122 | Type::Vec(ty) => { 123 | tokens.append_all(quote::quote! { Vec<#ty> }); 124 | } 125 | Type::Inner(ty) => { 126 | ty.to_tokens(tokens); 127 | } 128 | Type::Join(ty) => { 129 | tokens.append_all(quote::quote! 
{ ormlite::model::Join<#ty> }); 130 | } 131 | } 132 | } 133 | } 134 | 135 | impl PartialEq<&str> for Type { 136 | fn eq(&self, other: &&str) -> bool { 137 | let Type::Inner(t) = self else { 138 | return false; 139 | }; 140 | t.ident == other 141 | } 142 | } 143 | 144 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 145 | pub struct InnerType { 146 | pub path: Vec, 147 | pub ident: Ident, 148 | pub args: Option>, 149 | } 150 | 151 | impl InnerType { 152 | #[doc(hidden)] 153 | pub fn mock(ident: &str) -> Self { 154 | Self { 155 | path: vec![], 156 | ident: Ident::from(ident), 157 | args: None, 158 | } 159 | } 160 | } 161 | 162 | impl From<&syn::Path> for InnerType { 163 | fn from(path: &syn::Path) -> Self { 164 | let segment = path.segments.last().expect("path must have at least one segment"); 165 | let args: Option> = if let PathArguments::AngleBracketed(args) = &segment.arguments { 166 | let args = &args.args; 167 | let syn::GenericArgument::Type(ty) = args.first().unwrap() else { 168 | panic!("expected type syntax tree inside angle brackets"); 169 | }; 170 | let syn::Type::Path(path) = &ty else { 171 | panic!("expected path syntax tree inside angle brackets"); 172 | }; 173 | Some(Box::new(InnerType::from(&path.path))) 174 | } else { 175 | None 176 | }; 177 | let mut path = path.segments.iter().map(|s| Ident::from(&s.ident)).collect::>(); 178 | let ident = path.pop().expect("path must have at least one segment"); 179 | InnerType { path, args, ident } 180 | } 181 | } 182 | 183 | impl quote::ToTokens for InnerType { 184 | fn to_tokens(&self, tokens: &mut TokenStream) { 185 | let args = if let Some(args) = &self.args { 186 | quote::quote! { <#args> } 187 | } else { 188 | quote::quote! {} 189 | }; 190 | let path = &self.path; 191 | let ident = &self.ident; 192 | tokens.append_all(quote::quote! { #(#path ::)* #ident #args }); 193 | } 194 | } 195 | 196 | #[cfg(test)] 197 | mod tests { 198 | use super::*; 199 | 200 | #[test] 201 | fn test_primitive() { 202 | use syn::Path; 203 | let ty = Type::from(&syn::parse_str::("i32").unwrap()); 204 | assert!(!ty.is_json()); 205 | 206 | let ty = Type::from(&syn::parse_str::("Json").unwrap()); 207 | assert!(ty.is_json()); 208 | } 209 | 210 | #[test] 211 | fn test_other_type_to_quote() { 212 | use syn::Path; 213 | let ty = Type::from(&syn::parse_str::("rust_decimal::Decimal").unwrap()); 214 | let Type::Inner(ty) = &ty else { 215 | panic!("expected primitive"); 216 | }; 217 | assert_eq!(ty.ident, "Decimal"); 218 | assert_eq!(ty.path.len(), 1); 219 | assert_eq!(ty.path[0], "rust_decimal"); 220 | let z = quote::quote!(#ty); 221 | assert_eq!(z.to_string(), "rust_decimal :: Decimal"); 222 | } 223 | } 224 | -------------------------------------------------------------------------------- /cli/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "ormlite-cli" 3 | version = "0.23.3" 4 | edition = "2021" 5 | description = "An ORM for people who love SQL. Use the `ormlite` crate, not this one." 
6 | authors = ["Kurt Wolf "] 7 | license = "MIT" 8 | repository = "https://github.com/kurtbuilds/ormlite" 9 | 10 | [[bin]] 11 | name = "ormlite" 12 | path = "src/main.rs" 13 | 14 | [features] 15 | default = [ 16 | "ormlite/postgres", 17 | "ormlite/uuid", 18 | "ormlite/time", 19 | "ormlite/json", 20 | "ormlite/chrono", 21 | "ormlite/runtime-tokio-rustls", 22 | ] 23 | experimental-sid = [] 24 | 25 | [dependencies] 26 | anyhow.workspace = true 27 | time = { version = "0.3", features = ["formatting"] } 28 | clap = { version = "4", features = ["derive"] } 29 | ormlite.workspace = true 30 | ormlite-core.workspace = true 31 | ormlite-attr.workspace = true 32 | tokio = { version = "1", features = ["full"] } 33 | sqlmo.workspace = true 34 | sqlmo_sqlx.workspace = true 35 | syn = "2" 36 | proc-macro2 = "1" 37 | url = "2" 38 | sha2 = "0.10" 39 | regex = "1" 40 | colored = "2" 41 | tracing-subscriber = "0.3" 42 | tracing = "0.1" 43 | toml = { version = "0.8.19", features = ["preserve_order"] } 44 | serde = { version = "1.0.210", features = ["derive"] } 45 | indexmap = { version = "2.5.0", features = ["serde"] } 46 | dirs = "6.0.0" 47 | 48 | [dev-dependencies] 49 | assert_matches = "1" 50 | -------------------------------------------------------------------------------- /cli/Justfile: -------------------------------------------------------------------------------- 1 | set positional-arguments 2 | set dotenv-load := true 3 | 4 | help: 5 | @just --list --unsorted 6 | 7 | clean: 8 | cargo clean 9 | 10 | build: 11 | cargo build 12 | alias b := build 13 | 14 | run *args: 15 | cargo run -- "$@" 16 | alias r := run 17 | 18 | release: 19 | cargo build --release 20 | 21 | install: 22 | cargo install --path . --features experimental-sid 23 | 24 | # Tools for development 25 | bootstrap: 26 | cargo install cargo-edit 27 | cargo install --git https://github.com/kurtbuilds/toml-cli 28 | 29 | test *args: 30 | cargo test 31 | 32 | check: 33 | cargo check 34 | alias c := check 35 | 36 | fix: 37 | cargo clippy --fix 38 | 39 | bench: 40 | cargo criterion --features bench 41 | 42 | # Bump version. level=major,minor,patch 43 | version level: 44 | git diff-index --exit-code HEAD > /dev/null || ! echo You have untracked changes. Commit your changes before bumping the version. 
45 | cargo set-version --bump {{level}} 46 | cargo update # This bumps Cargo.lock 47 | VERSION=$(toml get Cargo.toml package.version) && \ 48 | git commit -am "Bump version {{level}} to $VERSION" && \ 49 | git tag v$VERSION && \ 50 | git push origin v$VERSION 51 | git push 52 | 53 | publish: 54 | cargo publish 55 | 56 | patch: test 57 | just version patch 58 | just publish 59 | -------------------------------------------------------------------------------- /cli/src/command.rs: -------------------------------------------------------------------------------- 1 | mod down; 2 | mod init; 3 | mod migrate; 4 | mod up; 5 | mod info; 6 | 7 | pub use down::*; 8 | pub use init::*; 9 | pub use info::*; 10 | pub use migrate::*; 11 | pub use up::*; 12 | -------------------------------------------------------------------------------- /cli/src/command/down.rs: -------------------------------------------------------------------------------- 1 | use std::env::var; 2 | use std::fs; 3 | 4 | use anyhow::{Error, Result}; 5 | use clap::Parser; 6 | use std::path::Path; 7 | 8 | use crate::command::{get_executed_migrations, get_pending_migrations, MigrationType}; 9 | use crate::util::{create_runtime, CommandSuccess}; 10 | use ormlite::postgres::{PgArguments, PgConnection}; 11 | use ormlite::Arguments; 12 | use ormlite::{Acquire, Connection, Executor}; 13 | use ormlite_core::config::{get_var_database_url, get_var_migration_folder, get_var_snapshot_folder}; 14 | use url::Url; 15 | use anyhow::anyhow; 16 | 17 | #[derive(Parser, Debug)] 18 | pub struct Down { 19 | target: Option, 20 | 21 | #[clap(long, short)] 22 | /// By default, the `down` command will perform a dry run. Use -f to run it. 23 | force: bool, 24 | } 25 | 26 | const CLEAR_DATABASE_QUERY: &str = "DROP SCHEMA public CASCADE; 27 | CREATE SCHEMA public; 28 | GRANT ALL ON SCHEMA public TO $USER; 29 | GRANT ALL ON SCHEMA public TO public; 30 | "; 31 | 32 | fn get_backups(backup_folder: &Path) -> Result> { 33 | let mut backups = std::fs::read_dir(backup_folder)? 34 | .filter_map(|entry| { 35 | let entry = entry.ok()?; 36 | let file_name = entry.file_name().into_string().ok()?; 37 | if file_name.ends_with(".sql.bak") { 38 | Some(file_name) 39 | } else { 40 | None 41 | } 42 | }) 43 | .collect::>(); 44 | backups.sort(); 45 | Ok(backups) 46 | } 47 | 48 | impl Down { 49 | pub fn run(self) -> Result<()> { 50 | let folder = get_var_migration_folder(); 51 | let runtime = create_runtime(); 52 | let url = get_var_database_url(); 53 | let mut conn = runtime.block_on(PgConnection::connect(&url))?; 54 | let conn = runtime.block_on(conn.acquire())?; 55 | 56 | let mut executed = runtime.block_on(get_executed_migrations(&mut *conn))?; 57 | let pending = get_pending_migrations(&folder)? 
58 | .into_iter() 59 | .filter(|m| m.migration_type() != MigrationType::Up) 60 | .collect::>(); 61 | 62 | let Some(last_pending) = pending.last() else { 63 | return Err(Error::msg("No migrations were found in the migrations folder.")); 64 | }; 65 | 66 | executed.reverse(); 67 | if last_pending.migration_type() == MigrationType::Simple { 68 | let target = if let Some(target) = self.target { 69 | target 70 | } else if executed.len() > 1 { 71 | executed.iter().nth(1).unwrap().name.clone() 72 | } else if executed.len() == 1 { 73 | "0_empty".to_string() 74 | } else { 75 | return Err(Error::msg( 76 | "No target migration was specified and there are no migrations to rollback to.", 77 | )); 78 | }; 79 | 80 | let snapshot_folder = get_var_snapshot_folder(); 81 | let backups = get_backups(&snapshot_folder)?; 82 | let Some(backup) = backups.iter().find(|b| { 83 | if target.chars().all(|c| c.is_numeric()) { 84 | b.split_once('_').map(|(version, _)| version == target).unwrap_or(false) 85 | } else if target.chars().next().map(|c| c.is_numeric()).unwrap_or(false) && target.contains('_') { 86 | // my_description 87 | **b == format!("{target}.sql.bak") 88 | } else { 89 | b.split_once('_').map(|(_, desc)| desc == target).unwrap_or(false) 90 | } 91 | }) else { 92 | return Err(Error::msg(format!( 93 | "Looked for snapshot `{}` in {}, but could not find it.", 94 | target, 95 | snapshot_folder.display() 96 | ))); 97 | }; 98 | 99 | if !self.force { 100 | println!( 101 | "Re-run with -f to execute rollback. This command will restore the following snapshot:\n{}", 102 | snapshot_folder.join(backup).display() 103 | ); 104 | return Ok(()); 105 | } 106 | 107 | let mut user = Url::parse(&url)?.username().to_string(); 108 | if user.is_empty() { 109 | user = var("USER")? 110 | } 111 | 112 | runtime.block_on(conn.execute(&*CLEAR_DATABASE_QUERY.replace("$USER", &user)))?; 113 | let restore_file = fs::File::open(snapshot_folder.join(backup))?; 114 | std::process::Command::new("psql") 115 | .arg(url) 116 | .arg("-q") 117 | .stdin(restore_file) 118 | .ok_or("Failed to restore database.")?; 119 | } else { 120 | if let Some(target) = self.target { 121 | executed = executed 122 | .into_iter() 123 | .take_while(|m| { 124 | let matches = if target.chars().all(|c| c.is_numeric()) { 125 | m.version_str() == target 126 | } else if target.chars().next().map(|c| c.is_numeric()).unwrap_or(false) && target.contains('_') 127 | { 128 | // my_description 129 | m.name == target 130 | } else { 131 | m.description == target 132 | }; 133 | !matches 134 | }) 135 | .collect(); 136 | } else { 137 | executed.truncate(1); 138 | } 139 | if !self.force { 140 | println!("Re-run with -f to execute rollbacks. 
This command will run the following rollbacks:"); 141 | } 142 | for migration in executed { 143 | let file_path = folder.join(migration.name).with_extension("down.sql"); 144 | if !self.force { 145 | println!("{}", file_path.display()); 146 | } else { 147 | let body = fs::read_to_string(&file_path)?; 148 | let conn = &mut *conn; 149 | runtime.block_on(conn.execute(&*body))?; 150 | let mut args = PgArguments::default(); 151 | args.add(migration.version).map_err(|e| anyhow!(e))?; 152 | let q = ormlite::query_with("DELETE FROM _sqlx_migrations WHERE version = $1", args); 153 | runtime.block_on(q.execute(conn))?; 154 | } 155 | } 156 | } 157 | Ok(()) 158 | } 159 | } 160 | -------------------------------------------------------------------------------- /cli/src/command/info.rs: -------------------------------------------------------------------------------- 1 | use crate::schema::schema_from_ormlite_project; 2 | use crate::util::create_runtime; 3 | use anyhow::Result; 4 | use clap::Parser; 5 | use ormlite::postgres::PgConnection; 6 | use ormlite::{Acquire, Connection}; 7 | use ormlite_core::config::{get_var_database_url, get_var_model_folders}; 8 | use sqlmo::Schema; 9 | use sqlmo_sqlx::FromPostgres; 10 | 11 | #[derive(Parser, Debug)] 12 | pub struct Info { 13 | /// The name of the migration 14 | #[clap(long, short)] 15 | url: bool, 16 | 17 | #[clap(long, short)] 18 | table: Option, 19 | } 20 | 21 | impl Info { 22 | pub fn run(self) -> Result<()> { 23 | // let folder = get_var_migration_folder(); 24 | let runtime = create_runtime(); 25 | let c = crate::config::load_config()?; 26 | let mut current = if self.url { 27 | let url = get_var_database_url(); 28 | let mut conn = runtime.block_on(PgConnection::connect(&url))?; 29 | let conn = runtime.block_on(conn.acquire())?; 30 | runtime.block_on(Schema::try_from_postgres(conn, "public"))? 31 | } else { 32 | let folder_paths = get_var_model_folders(); 33 | let folder_paths = folder_paths.iter().map(|p| p.as_path()).collect::>(); 34 | schema_from_ormlite_project(&folder_paths, &c)? 
35 | }; 36 | if let Some(s) = self.table { 37 | current.tables.retain(|t| t.name == s); 38 | } 39 | for table in current.tables { 40 | eprintln!("Table: {}", table.name); 41 | for column in table.columns { 42 | let nullable = if column.nullable { " " } else { " NOT NULL " }; 43 | let constraint = if let Some(constraint) = &column.constraint { 44 | format!(" {:?}", constraint) 45 | } else { 46 | "".to_string() 47 | }; 48 | eprintln!(" {}: {:?}{}{}", column.name, column.typ, nullable, constraint) 49 | } 50 | } 51 | Ok(()) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /cli/src/command/init.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use clap::Parser; 3 | use colored::Colorize; 4 | use ormlite::{Acquire, Connection}; 5 | 6 | use ormlite::postgres::PgConnection; 7 | use ormlite::Executor; 8 | 9 | use crate::util::create_runtime; 10 | use ormlite_core::config::get_var_database_url; 11 | 12 | const INIT_QUERY: &str = r#" 13 | CREATE TABLE public._sqlx_migrations ( 14 | version bigint NOT NULL, 15 | description text NOT NULL, 16 | installed_on timestamp with time zone DEFAULT now() NOT NULL, 17 | success boolean NOT NULL, 18 | checksum bytea NOT NULL, 19 | execution_time bigint NOT NULL 20 | ); 21 | "#; 22 | 23 | #[derive(Parser, Debug)] 24 | pub struct Init {} 25 | 26 | impl Init { 27 | pub fn run(self) -> Result<()> { 28 | let runtime = create_runtime(); 29 | let url = get_var_database_url(); 30 | runtime.block_on(async { 31 | let mut conn = PgConnection::connect(&url).await?; 32 | let conn = conn.acquire().await?; 33 | let table_exists = conn.execute("select 1 from _sqlx_migrations limit 1").await.is_ok(); 34 | if table_exists { 35 | eprintln!( 36 | "{} Database {} already initialized. No actions taken.", 37 | "SUCCESS".green(), 38 | url 39 | ); 40 | return Ok(()); 41 | } 42 | conn.execute(INIT_QUERY).await?; 43 | eprintln!("{} Initialized database at {}", "SUCCESS".green(), url); 44 | Ok(()) 45 | }) 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /cli/src/command/up.rs: -------------------------------------------------------------------------------- 1 | use crate::command::{get_executed_migrations, get_pending_migrations, MigrationType}; 2 | use crate::util::{create_runtime, CommandSuccess}; 3 | use anyhow::{anyhow, Result}; 4 | use clap::Parser; 5 | use ormlite::postgres::{PgArguments, PgConnection}; 6 | use ormlite::{Acquire, Arguments, Connection, Executor}; 7 | use ormlite_core::config::{get_var_database_url, get_var_migration_folder, get_var_snapshot_folder}; 8 | use sha2::{Digest, Sha384}; 9 | use std::collections::HashSet; 10 | use std::fs; 11 | use std::fs::File; 12 | use std::time::Instant; 13 | use tracing::debug; 14 | 15 | #[derive(Parser, Debug)] 16 | pub struct Up { 17 | #[clap(long, short)] 18 | /// Run all pending migrations. If not set, only the first pending migration will be run. 19 | all: bool, 20 | 21 | #[clap(long, short)] 22 | /// Only affects `up` command when using simple (up-only) migrations. Causes command to skip creating the backup. 23 | no_snapshot: bool, 24 | 25 | #[clap(long, short)] 26 | /// Only affects `up` command when using up/down migrations. Causes command to create a backup. 
27 | snapshot: bool, 28 | } 29 | 30 | impl Up { 31 | pub fn run(self) -> Result<()> { 32 | let folder = get_var_migration_folder(); 33 | let runtime = create_runtime(); 34 | let url = get_var_database_url(); 35 | let mut conn = runtime.block_on(PgConnection::connect(&url))?; 36 | let conn = runtime.block_on(conn.acquire()).unwrap(); 37 | 38 | let executed = runtime.block_on(get_executed_migrations(conn))?; 39 | let pending = get_pending_migrations(&folder) 40 | .unwrap() 41 | .into_iter() 42 | .filter(|m| m.migration_type() != MigrationType::Down) 43 | .collect::>(); 44 | for e in &executed { 45 | debug!("Executed: {}", e.full_name()); 46 | } 47 | debug!("{} migrations executed", executed.len()); 48 | for p in &pending { 49 | debug!("Pending: {}", p.full_name()); 50 | } 51 | debug!("{} migrations pending", pending.len()); 52 | 53 | let pending_hashset = pending.iter().map(|m| m.version).collect::>(); 54 | for e in &executed { 55 | if !pending_hashset.contains(&e.version) { 56 | return Err(anyhow!("Migration {} was executed on the database, but was not found in your migrations folder. Your migrations are out of sync.", e.full_name())); 57 | } 58 | } 59 | if executed.len() == pending.len() { 60 | eprintln!("No migrations to run."); 61 | return Ok(()); 62 | } 63 | let last_executed = executed.last().map(|m| m.name.clone()).unwrap_or("0_empty".to_string()); 64 | let executed = executed.into_iter().map(|m| m.version).collect::>(); 65 | 66 | let pending = pending 67 | .into_iter() 68 | .filter(|m| !executed.contains(&m.version)) 69 | .collect::>(); 70 | 71 | let is_simple = pending.last().as_ref().unwrap().migration_type() == MigrationType::Simple; 72 | if (is_simple && !self.no_snapshot) || (!is_simple && self.snapshot) { 73 | eprintln!("Creating snapshot..."); 74 | let snapshot_folder = get_var_snapshot_folder(); 75 | fs::create_dir_all(&snapshot_folder).unwrap(); 76 | let file_path = snapshot_folder.join(format!("{last_executed}.sql.bak")); 77 | let backup_file = File::create(&file_path)?; 78 | std::process::Command::new("pg_dump") 79 | .arg(&url) 80 | .stdout(backup_file) 81 | .output()? 
82 | .ok_or("Failed to create backup")?; 83 | eprintln!("{}: Created database snapshot.", file_path.display()); 84 | } 85 | 86 | let pending = pending.iter().take(if self.all { pending.len() } else { 1 }); 87 | for migration in pending { 88 | debug!("Running migration: {}", migration.name); 89 | let file_path = folder 90 | .join(&migration.name) 91 | .with_extension(migration.migration_type().extension()); 92 | let body = fs::read_to_string(&file_path)?; 93 | 94 | let checksum = Sha384::digest(body.as_bytes()).to_vec(); 95 | 96 | let start = Instant::now(); 97 | runtime 98 | .block_on(conn.execute(&*body)) 99 | .map_err(|e| anyhow!("Error while running migration {}: {}", &migration.name, e))?; 100 | let elapsed = start.elapsed(); 101 | 102 | let mut args = PgArguments::default(); 103 | args.add(migration.version).map_err(|e| anyhow!(e))?; 104 | args.add(&migration.description).map_err(|e| anyhow!(e))?; 105 | args.add(checksum).map_err(|e| anyhow!(e))?; 106 | args.add(elapsed.as_nanos() as i64).map_err(|e| anyhow!(e))?; 107 | let q = ormlite::query_with("INSERT INTO _sqlx_migrations (version, description, installed_on, success, checksum, execution_time) VALUES ($1, $2, NOW(), true, $3, $4)", args); 108 | runtime.block_on(q.execute(&mut *conn))?; 109 | eprintln!("{}: Executed migration", file_path.display()); 110 | } 111 | Ok(()) 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /cli/src/config.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result as AnyResult; 2 | use std::{fs::File, io::Read, path::Path}; 3 | pub use ormlite_core::config::Config; 4 | 5 | static CONFIG_REL_PATHS: [&str; 2] = [".ormlite/config.toml", ".ormlite.toml"]; 6 | 7 | pub fn load_config() -> AnyResult { 8 | let home_dir = dirs::home_dir().unwrap(); 9 | let config_dir = dirs::config_dir().unwrap(); 10 | let search_paths: &[&str] = &[".", "..", config_dir.to_str().unwrap(), home_dir.to_str().unwrap()]; 11 | for p in search_paths { 12 | for rel_path in &CONFIG_REL_PATHS { 13 | let path = format!("{}/{}", p, rel_path); 14 | let path = Path::new(&path); 15 | if path.exists() { 16 | return read(path); 17 | } 18 | } 19 | } 20 | Ok(Config::default()) 21 | } 22 | 23 | pub fn read(path: impl AsRef) -> AnyResult { 24 | let path = path.as_ref(); 25 | let mut file = File::open(path)?; 26 | let mut buf = String::new(); 27 | file.read_to_string(&mut buf)?; 28 | let config: Config = toml::from_str(&buf)?; 29 | Ok(config) 30 | } 31 | 32 | 33 | -------------------------------------------------------------------------------- /cli/src/main.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use clap::{Parser, Subcommand}; 3 | use tracing::Level; 4 | use tracing_subscriber::layer::SubscriberExt; 5 | use tracing_subscriber::util::SubscriberInitExt; 6 | 7 | mod command; 8 | mod util; 9 | mod config; 10 | mod schema; 11 | 12 | use command::*; 13 | 14 | #[derive(Parser, Debug)] 15 | #[command(author, version, about)] 16 | struct Cli { 17 | #[command(subcommand)] 18 | command: Command, 19 | #[clap(long, short, global = true)] 20 | verbose: bool, 21 | } 22 | 23 | #[derive(Subcommand, Debug)] 24 | pub enum Command { 25 | /// Create a migration. Supports auto-generation based on models detected in the codebase. 26 | Migrate(Migrate), 27 | /// Run up migrations. Runs one migration, or pass `--all` to run all pending migrations. 
28 | /// Optionally creates a snapshot to enable rollback, even when you only write an up migration. 29 | Up(Up), 30 | /// Run down migration. If no target revision is specified, the last migration will be run. 31 | Down(Down), 32 | /// Initialize the database for use with `ormlite`. Creates the migrations table. 33 | Init(Init), 34 | /// Print the schema detected from your model definitions, or from the database when `--url` is passed. 35 | Debug(Info), 36 | } 37 | 38 | fn main() -> Result<()> { 39 | let cli = Cli::parse(); 40 | let level = if cli.verbose { Level::DEBUG } else { Level::INFO }; 41 | tracing_subscriber::registry() 42 | .with(tracing_subscriber::fmt::layer().without_time()) 43 | .with( 44 | tracing_subscriber::filter::Targets::new() 45 | .with_target(env!("CARGO_BIN_NAME"), level) 46 | .with_target("ormlite_attr", level) 47 | .with_target("sqlmo", level), 48 | ) 49 | .init(); 50 | use Command::*; 51 | match cli.command { 52 | Migrate(m) => m.run(), 53 | Up(up) => up.run(), 54 | Down(down) => down.run(), 55 | Init(init) => init.run(), 56 | Debug(info) => info.run(), 57 | } 58 | } -------------------------------------------------------------------------------- /cli/src/schema.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::path::Path; 3 | use sqlmo::{Constraint, Schema, Table}; 4 | use ormlite_attr::{schema_from_filepaths, Ident, InnerType, Type}; 5 | use ormlite_core::schema::FromMeta; 6 | use anyhow::Result as AnyResult; 7 | use crate::config::Config; 8 | 9 | pub fn schema_from_ormlite_project(paths: &[&Path], c: &Config) -> AnyResult<Schema> { 10 | let mut schema = Schema::default(); 11 | let mut fs_schema = schema_from_filepaths(paths)?; 12 | let primary_key_type: HashMap<String, InnerType> = fs_schema 13 | .tables 14 | .iter() 15 | .map(|t| { 16 | let pkey_ty = t.pkey.ty.inner_type().clone(); 17 | (t.ident.to_string(), pkey_ty) 18 | }) 19 | .collect(); 20 | for t in &mut fs_schema.tables { 21 | for c in &mut t.table.columns { 22 | // replace alias types with the real type. 23 | let inner = c.ty.inner_type_mut(); 24 | if let Some(f) = fs_schema.type_reprs.get(&inner.ident.to_string()) { 25 | inner.ident = Ident::from(f); 26 | } 27 | // replace join types with the primary key type.
28 | if c.ty.is_join() { 29 | let model_name = c.ty.inner_type_name(); 30 | let pkey = primary_key_type 31 | .get(&model_name) 32 | .expect(&format!("Could not find model {} for join", model_name)); 33 | c.ty = Type::Inner(pkey.clone()); 34 | } 35 | } 36 | } 37 | for table in fs_schema.tables { 38 | let table = Table::from_meta(&table); 39 | schema.tables.push(table); 40 | } 41 | let mut table_names: HashMap = 42 | schema.tables.iter().map(|t| (t.name.clone(), (t.name.clone(), t.primary_key().unwrap().name.clone()))).collect(); 43 | for (alias, real) in &c.table.aliases { 44 | let Some(real) = table_names.get(real) else { 45 | continue; 46 | }; 47 | table_names.insert(alias.clone(), real.clone()); 48 | } 49 | for table in &mut schema.tables { 50 | for column in &mut table.columns { 51 | if column.primary_key { 52 | continue; 53 | } 54 | if column.name.ends_with("_id") || column.name.ends_with("_uuid") { 55 | let Some((model_name, _)) = column.name.rsplit_once('_') else { 56 | continue; 57 | }; 58 | if let Some((t, pkey)) = table_names.get(model_name) { 59 | let constraint = Constraint::foreign_key(t.to_string(), vec![pkey.clone()]); 60 | column.constraint = Some(constraint); 61 | } 62 | } 63 | } 64 | } 65 | Ok(schema) 66 | } -------------------------------------------------------------------------------- /cli/src/util.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Error; 2 | 3 | pub(crate) fn create_runtime() -> tokio::runtime::Runtime { 4 | tokio::runtime::Builder::new_current_thread() 5 | .enable_io() 6 | .enable_time() 7 | .build() 8 | .unwrap() 9 | } 10 | 11 | pub trait CommandSuccess { 12 | fn ok_or(&mut self, message: &str) -> Result<(), Error>; 13 | } 14 | 15 | impl CommandSuccess for std::process::Command { 16 | fn ok_or(&mut self, message: &str) -> Result<(), Error> { 17 | let status = self.status()?; 18 | if status.success() { 19 | Ok(()) 20 | } else { 21 | Err(Error::msg(message.to_string())) 22 | } 23 | } 24 | } 25 | 26 | impl CommandSuccess for std::process::Output { 27 | fn ok_or(&mut self, message: &str) -> Result<(), Error> { 28 | if self.status.success() { 29 | Ok(()) 30 | } else { 31 | Err(Error::msg(message.to_string())) 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /core/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "ormlite-core" 3 | version = "0.23.4" 4 | edition = "2021" 5 | description = "An ORM for people who love SQL. Use the `ormlite` crate, not this one." 
6 | authors = ["Kurt Wolf "] 7 | license = "MIT" 8 | repository = "https://github.com/kurtbuilds/ormlite" 9 | 10 | [features] 11 | mysql = ["sqlx/mysql"] 12 | sqlite = ["sqlx/sqlite"] 13 | postgres = ["sqlx/postgres"] 14 | runtime-tokio-rustls = ["sqlx/runtime-tokio-rustls"] 15 | 16 | [dependencies] 17 | tokio = { version = "1", features = ["full"] } 18 | futures.workspace = true 19 | sqlx.workspace = true 20 | sqlparser = "0.51.0" 21 | thiserror = "1" 22 | sqlmo.workspace = true 23 | ormlite-attr.workspace = true 24 | indexmap.workspace = true 25 | proc-macro2.workspace = true 26 | anyhow.workspace = true 27 | syn.workspace = true 28 | serde.workspace = true 29 | 30 | [dev-dependencies] 31 | syn = "2" 32 | assert_matches = "1" 33 | uuid = { version = "1", features = ["serde", "v4"] } 34 | -------------------------------------------------------------------------------- /core/Justfile: -------------------------------------------------------------------------------- 1 | set dotenv-load := false 2 | 3 | help: 4 | @just --list --unsorted 5 | 6 | clean: 7 | cargo clean 8 | 9 | build: 10 | cargo build 11 | alias b := build 12 | 13 | run *args: 14 | cargo run {{args}} 15 | alias r := run 16 | 17 | release: 18 | cargo build --release 19 | 20 | install: 21 | cargo install --path . 22 | 23 | # Tools for development 24 | bootstrap: 25 | cargo install cargo-edit 26 | cargo install --git https://github.com/kurtbuilds/toml-cli 27 | 28 | test *args: 29 | cargo test --features runtime-tokio-rustls,sqlx/uuid -- "$@" 30 | 31 | check: 32 | cargo check 33 | alias c := check 34 | 35 | fix: 36 | cargo clippy --fix 37 | 38 | bench: 39 | cargo criterion --features bench 40 | 41 | # Bump version. level=major,minor,patch 42 | version level: 43 | git diff-index --exit-code HEAD > /dev/null || ! echo You have untracked changes. Commit your changes before bumping the version. 44 | cargo set-version --bump {{level}} 45 | cargo update # This bumps Cargo.lock 46 | VERSION=$(toml get Cargo.toml package.version) && \ 47 | git commit -am "Bump version {{level}} to $VERSION" && \ 48 | git tag v$VERSION && \ 49 | git push origin v$VERSION 50 | git push 51 | 52 | publish: 53 | cargo publish 54 | 55 | patch: test 56 | just version patch 57 | just publish 58 | -------------------------------------------------------------------------------- /core/README.md: -------------------------------------------------------------------------------- 1 | Please see [`ormlite`](https://crates.io/crates/ormlite). 
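Aside — a usage sketch for the foreign-key alias mechanism consulted by `schema_from_ormlite_project` in cli/src/schema.rs above (and duplicated in core/src/schema.rs below). The `Config` and `Table` types come from core/src/config.rs below; the `org`/`organization` names are made up for illustration, and the CLI normally loads the same data from `.ormlite.toml` or `.ormlite/config.toml` (see cli/src/config.rs above) rather than building it in code.

    // Illustrative only: with this alias entry, a column named `org_id` on another
    // table receives a foreign-key constraint pointing at `organization`'s primary key.
    use indexmap::IndexMap;
    use ormlite_core::config::{Config, Table};

    fn example_config() -> Config {
        let mut aliases = IndexMap::new();
        aliases.insert("org".to_string(), "organization".to_string());
        Config {
            table: Table { aliases },
        }
    }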
2 | -------------------------------------------------------------------------------- /core/src/config.rs: -------------------------------------------------------------------------------- 1 | use std::env::var; 2 | use std::path::PathBuf; 3 | use std::str::FromStr; 4 | use serde::{Deserialize, Serialize}; 5 | use indexmap::IndexMap; 6 | 7 | const MIGRATION_FOLDER: &str = "migrations"; 8 | pub const MIGRATION_TABLE: &str = "_sqlx_migrations"; 9 | const MIGRATION_SNAPSHOT_FOLDER: &str = "migrations/snapshot"; 10 | pub const MODEL_FOLDERS: &str = "."; 11 | 12 | pub fn get_var_migration_folder() -> PathBuf { 13 | let folder = var("MIGRATION_FOLDER").unwrap_or_else(|_| MIGRATION_FOLDER.to_string()); 14 | PathBuf::from_str(&folder).unwrap() 15 | } 16 | 17 | pub fn get_var_snapshot_folder() -> PathBuf { 18 | let folder = var("MIGRATION_BACKUP_FOLDER").unwrap_or_else(|_| MIGRATION_SNAPSHOT_FOLDER.to_string()); 19 | PathBuf::from_str(&folder).unwrap() 20 | } 21 | 22 | pub fn get_var_database_url() -> String { 23 | var("DATABASE_URL").expect("DATABASE_URL must be set") 24 | } 25 | 26 | pub fn get_var_model_folders() -> Vec { 27 | let folders = var("MODEL_FOLDERS").unwrap_or_else(|_| MODEL_FOLDERS.to_string()); 28 | folders.split(',').map(|s| PathBuf::from_str(s).unwrap()).collect() 29 | } 30 | 31 | #[derive(Debug, Serialize, Deserialize, Clone, Default)] 32 | pub struct Table { 33 | /// When auto detecting foreign keys, use this aliases 34 | /// For example, if you have a table organization, but the foreign key is org_id, 35 | /// you'd define the alias as "org" => "organization" 36 | pub aliases: IndexMap, 37 | } 38 | 39 | #[derive(Debug, Deserialize, Serialize, Clone, Default)] 40 | pub struct Config { 41 | pub table: Table, 42 | } 43 | -------------------------------------------------------------------------------- /core/src/error.rs: -------------------------------------------------------------------------------- 1 | pub type Result = std::result::Result; 2 | 3 | #[derive(Debug, thiserror::Error)] 4 | pub enum Error { 5 | #[error(transparent)] 6 | SqlxError(#[from] sqlx::Error), 7 | 8 | #[error(transparent)] 9 | TokenizationError(#[from] sqlparser::tokenizer::TokenizerError), 10 | 11 | #[error("{0}")] 12 | OrmliteError(String), 13 | } 14 | -------------------------------------------------------------------------------- /core/src/insert.rs: -------------------------------------------------------------------------------- 1 | use crate::Result; 2 | use futures::future::BoxFuture; 3 | pub use sqlmo::query::OnConflict; 4 | use sqlmo::{Dialect, Insert, ToSql}; 5 | 6 | /// Represents an insert query. 7 | /// We had to turn this into a model because we need to pass in the on_conflict configuration. 
8 | pub struct Insertion<'a, Acquire, Model, DB: sqlx::Database> { 9 | pub acquire: Acquire, 10 | pub model: Model, 11 | pub closure: Box BoxFuture<'a, Result>>, 12 | pub insert: Insert, 13 | pub _db: std::marker::PhantomData, 14 | } 15 | 16 | impl<'a, Acquire, Model, DB: sqlx::Database> Insertion<'a, Acquire, Model, DB> { 17 | pub fn on_conflict(mut self, c: OnConflict) -> Self { 18 | self.insert.on_conflict = c; 19 | self 20 | } 21 | } 22 | 23 | impl<'a, Acquire, Model: crate::model::Model, DB: sqlx::Database> std::future::IntoFuture 24 | for Insertion<'a, Acquire, Model, DB> 25 | { 26 | type Output = Result; 27 | type IntoFuture = BoxFuture<'a, Self::Output>; 28 | 29 | fn into_future(self) -> Self::IntoFuture { 30 | // hack to get around the fact that postgres drops the return 31 | // value in ON CONFLICT DO NOTHING case 32 | // let q = if matches!(self.insert.on_conflict, OnConflict::Ignore) { 33 | // let insert_as_select = Select { 34 | // ctes: vec![ 35 | // Cte::new("inserted", self.insert) 36 | // ], 37 | // columns: vec![SelectColumn::raw("*")], 38 | // from: Some("inserted".into()), 39 | // ..Select::default() 40 | // }; 41 | // let pkey = Model::primary_key().unwrap(); 42 | // let plc_idx = Model::primary_key_placeholder_idx().unwrap(); 43 | // let select_existing = Select { 44 | // from: Some(Model::table_name().into()), 45 | // columns: Model::table_columns().iter().map(|&c| c.into()).collect(), 46 | // where_: format!("{pkey} = ${plc_idx}").into(), 47 | // ..Select::default() 48 | // }; 49 | // let union = Union { 50 | // all: true, 51 | // queries: vec![ 52 | // insert_as_select, 53 | // select_existing 54 | // ] 55 | // }; 56 | // union.to_sql(Dialect::Postgres) 57 | // } else { 58 | // self.insert.to_sql(Dialect::Postgres) 59 | // }; 60 | let q = self.insert.to_sql(Dialect::Postgres); 61 | (self.closure)(self.acquire, self.model, q) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /core/src/join.rs: -------------------------------------------------------------------------------- 1 | use crate::model::Model; 2 | use serde::de::Error; 3 | use serde::Deserialize; 4 | use serde::{Serialize, Serializer}; 5 | use sqlmo::query::Criteria; 6 | use sqlmo::query::SelectColumn; 7 | use sqlmo::{Expr, Operation, Where}; 8 | use sqlx::{Database, Decode, Encode, Type}; 9 | use std::ops::{Deref, DerefMut}; 10 | 11 | pub trait JoinMeta { 12 | type IdType: Clone + Send + Eq + PartialEq + std::hash::Hash; 13 | fn _id(&self) -> Self::IdType; 14 | } 15 | 16 | impl JoinMeta for Option { 17 | type IdType = Option; 18 | 19 | fn _id(&self) -> Self::IdType { 20 | self.as_ref().map(|x| x._id()) 21 | } 22 | } 23 | 24 | impl JoinMeta for Join { 25 | type IdType = T::IdType; 26 | 27 | fn _id(&self) -> Self::IdType { 28 | self.id.clone() 29 | } 30 | } 31 | 32 | // impl JoinMeta for Vec { 33 | // type IdType = T::IdType; 34 | 35 | // fn _id(&self) -> Self::IdType { 36 | // unimplemented!() 37 | // } 38 | // } 39 | 40 | pub trait Loadable { 41 | #[allow(async_fn_in_trait)] 42 | async fn load<'s, 'e, E>(&'s mut self, db: E) -> crate::error::Result<&'s T> 43 | where 44 | T::IdType: 'e + Send + Sync, 45 | E: 'e + sqlx::Executor<'e, Database = DB>, 46 | T: 's; 47 | } 48 | 49 | #[derive(Debug)] 50 | pub struct Join { 51 | pub id: T::IdType, 52 | data: JoinData, 53 | } 54 | 55 | /// Only represents a many-to-one relationship. 
56 | #[derive(Debug)] 57 | pub enum JoinData { 58 | NotQueried, 59 | QueryResult(T), 60 | Modified(T), 61 | } 62 | 63 | impl Join { 64 | pub fn new_with_id(id: T::IdType) -> Self { 65 | Self { 66 | id, 67 | data: JoinData::NotQueried, 68 | } 69 | } 70 | 71 | pub fn new(obj: T) -> Self { 72 | Self { 73 | id: crate::join::JoinMeta::_id(&obj), 74 | data: JoinData::Modified(obj), 75 | } 76 | } 77 | 78 | /// Whether join data has been loaded into memory. 79 | pub fn loaded(&self) -> bool { 80 | match &self.data { 81 | JoinData::NotQueried => false, 82 | JoinData::QueryResult(_) => true, 83 | JoinData::Modified(_) => true, 84 | } 85 | } 86 | 87 | pub fn is_modified(&self) -> bool { 88 | match &self.data { 89 | JoinData::NotQueried => false, 90 | JoinData::QueryResult(_) => false, 91 | JoinData::Modified(_) => true, 92 | } 93 | } 94 | 95 | /// Takes ownership and return any modified data. Leaves the Join in a NotQueried state. 96 | #[doc(hidden)] 97 | pub fn _take_modification(&mut self) -> Option { 98 | let owned = std::mem::replace(&mut self.data, JoinData::NotQueried); 99 | match owned { 100 | JoinData::NotQueried => None, 101 | JoinData::QueryResult(_) => None, 102 | JoinData::Modified(obj) => Some(obj), 103 | } 104 | } 105 | fn transition_to_modified(&mut self) -> &mut T { 106 | let owned = std::mem::replace(&mut self.data, JoinData::NotQueried); 107 | match owned { 108 | JoinData::NotQueried => { 109 | panic!("Tried to deref_mut a joined object, but it has not been queried.") 110 | } 111 | JoinData::QueryResult(r) => { 112 | self.data = JoinData::Modified(r); 113 | } 114 | JoinData::Modified(r) => { 115 | self.data = JoinData::Modified(r); 116 | } 117 | } 118 | match &mut self.data { 119 | JoinData::Modified(r) => r, 120 | _ => unreachable!(), 121 | } 122 | } 123 | 124 | #[doc(hidden)] 125 | pub fn _query_result(obj: T) -> Self { 126 | Self { 127 | id: obj._id(), 128 | data: JoinData::QueryResult(obj), 129 | } 130 | } 131 | } 132 | 133 | impl Loadable for Join 134 | where 135 | DB: Database, 136 | T: JoinMeta + Model + Send, 137 | T::IdType: for<'a> Encode<'a, DB> + for<'a> Decode<'a, DB> + Type, 138 | { 139 | async fn load<'s, 'e, E: sqlx::Executor<'e, Database = DB> + 'e>( 140 | &'s mut self, 141 | conn: E, 142 | ) -> crate::error::Result<&'s T> 143 | where 144 | T::IdType: 'e + Send + Sync, 145 | T: 's, 146 | { 147 | let model = T::fetch_one(self.id.clone(), conn).await?; 148 | self.data = JoinData::QueryResult(model); 149 | let s = &*self; 150 | Ok(s.deref()) 151 | } 152 | } 153 | 154 | impl Deref for Join { 155 | type Target = T; 156 | 157 | fn deref(&self) -> &Self::Target { 158 | match &self.data { 159 | JoinData::NotQueried => { 160 | panic!("Tried to deref a joined object, but it has not been queried.") 161 | } 162 | JoinData::QueryResult(r) => r, 163 | JoinData::Modified(r) => r, 164 | } 165 | } 166 | } 167 | 168 | impl DerefMut for Join { 169 | fn deref_mut(&mut self) -> &mut Self::Target { 170 | self.transition_to_modified() 171 | } 172 | } 173 | 174 | // #[derive(Debug, Copy, Clone)] 175 | // pub enum SemanticJoinType { 176 | // OneToMany, 177 | // ManyToOne, 178 | // ManyToMany(&'static str), 179 | // } 180 | 181 | /// Not meant for end users. 
182 | #[doc(hidden)] 183 | #[derive(Debug, Clone, Copy)] 184 | pub enum JoinDescription { 185 | ManyToOne { 186 | /// the columns of the joined table 187 | columns: &'static [&'static str], 188 | /// the name of the joined table 189 | foreign_table: &'static str, 190 | 191 | local_column: &'static str, 192 | /// the field on the local object. joined table is aliased to this to prevent conflicts. 193 | field: &'static str, 194 | foreign_key: &'static str, 195 | }, 196 | } 197 | 198 | pub fn column_alias(field: &str, column: &str) -> String { 199 | format!("__{}__{}", field, column) 200 | } 201 | 202 | pub fn select_columns( 203 | columns: &'static [&'static str], 204 | field: &'static str, 205 | ) -> impl Iterator + 'static { 206 | columns 207 | .iter() 208 | .map(|&c| SelectColumn::table_column(field, c).alias(column_alias(field, c))) 209 | } 210 | 211 | pub fn criteria(local_table: &str, local_column: &str, remote_table: &str, remote_column: &str) -> Criteria { 212 | Criteria::On(Where::Expr(Expr::BinOp( 213 | Operation::Eq, 214 | Expr::Column { 215 | schema: None, 216 | table: Some(local_table.to_string()), 217 | column: local_column.to_string(), 218 | } 219 | .into(), 220 | Expr::Column { 221 | schema: None, 222 | table: Some(remote_table.to_string()), 223 | column: remote_column.to_string(), 224 | } 225 | .into(), 226 | ))) 227 | } 228 | 229 | // impl JoinDescription { 230 | // pub fn join_clause(&self, local_table: &str) -> JoinQueryFragment { 231 | // use SemanticJoinType::*; 232 | // let table = self.table_name; 233 | // let relation = self.relation; 234 | // let local_key = self.key; 235 | // let foreign_key = self.foreign_key; 236 | // let join = match &self.semantic_join_type { 237 | // ManyToOne => { 238 | // format!(r#""{relation}"."{foreign_key}" = "{local_table}"."{local_key}" "#) 239 | // } 240 | // OneToMany => { 241 | // format!(r#""{relation}"."{local_key}" = "{local_table}"."{foreign_key}" "#) 242 | // } 243 | // ManyToMany(_join_table) => { 244 | // unimplemented!() 245 | // } 246 | // }; 247 | // JoinQueryFragment::new(table).alias(self.relation).on_raw(join) 248 | // } 249 | 250 | // pub fn select_columns(&self) -> impl Iterator + '_ { 251 | // let JoinDescription::ManyToOne { 252 | // columns, 253 | // table, 254 | // field, 255 | // foreign_key, 256 | // } = self 257 | // else { 258 | // panic!("ManyToMany not supported yet") 259 | // }; 260 | // columns 261 | // .iter() 262 | // .map(|c| SelectColumn::table_column(field, c).alias(column_alias(field, column))) 263 | // } 264 | // } 265 | 266 | impl Serialize for Join { 267 | fn serialize(&self, serializer: S) -> Result 268 | where 269 | S: Serializer, 270 | { 271 | match &self.data { 272 | JoinData::Modified(data) => data.serialize(serializer), 273 | JoinData::NotQueried => serializer.serialize_none(), 274 | JoinData::QueryResult(data) => data.serialize(serializer), 275 | } 276 | } 277 | } 278 | 279 | impl<'de, T> Deserialize<'de> for Join 280 | where 281 | T: JoinMeta + Deserialize<'de>, 282 | { 283 | fn deserialize(deserializer: D) -> Result 284 | where 285 | D: serde::Deserializer<'de>, 286 | { 287 | let data = Option::::deserialize(deserializer)?; 288 | 289 | let (id_type, join_data) = match data { 290 | Some(value) => (T::_id(&value), JoinData::QueryResult(value)), 291 | None => return Err(D::Error::custom("Invalid value")), 292 | }; 293 | 294 | Ok(Join { 295 | id: id_type, 296 | data: join_data, 297 | }) 298 | } 299 | } 300 | -------------------------------------------------------------------------------- 
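Aside — a usage sketch for the `Join`/`Loadable` API defined in core/src/join.rs above. The `Organization` model, its fields, and the `i32` id are assumptions for illustration (its `ormlite::Model` derive is elided); the `Join` methods themselves (`new_with_id`, `new`, `loaded`, `is_modified`, `load`, deref access) are the ones shown above.

    // Hypothetical model: assume it derives ormlite::Model elsewhere, so it
    // implements Model<sqlx::Postgres> and JoinMeta with IdType = i32.
    struct Organization {
        id: i32,
        name: String,
    }

    use ormlite_core::join::{Join, Loadable};

    async fn demo(conn: &mut sqlx::PgConnection, org_id: i32) -> ormlite_core::Result<()> {
        // Construct the join side from an id alone; nothing is queried yet.
        let mut org: Join<Organization> = Join::new_with_id(org_id);
        assert!(!org.loaded());

        // Fetch the related row on demand; afterwards the Join derefs to Organization.
        let loaded = org.load(&mut *conn).await?;
        println!("{}", loaded.name);

        // Join::new wraps an already-constructed value and marks it as modified
        // (see is_modified / _take_modification above).
        let fresh = Join::new(Organization { id: 2, name: "Acme".to_string() });
        assert!(fresh.is_modified());
        Ok(())
    }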
/core/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub use self::error::{Error, Result}; 2 | pub use self::query_builder::SelectQueryBuilder; 3 | pub use futures::future::BoxFuture; 4 | pub use join::Join; 5 | 6 | pub mod config; 7 | mod error; 8 | pub mod insert; 9 | pub mod join; 10 | pub mod model; 11 | pub mod query_builder; 12 | pub mod schema; 13 | -------------------------------------------------------------------------------- /core/src/model.rs: -------------------------------------------------------------------------------- 1 | /// A model is a struct that represents a row in a relational database table. 2 | /// Using the `[derive(ormlite::Model)]` macro, it will acquire the following traits: 3 | /// 4 | /// - `ormlite::Model`, giving it direct database access, e.g. `insert`, `update_all_fields`, etc. 5 | /// - `ormlite::HasModelBuilder`, letting it build partials, so you can insert or update some 6 | /// fields instead of all of them at once, e.g. `model.name("John").update()` 7 | /// - `ormlite::TableMeta`, which you typically don't use directly, but provides table metadata 8 | /// (e.g. table name) 9 | /// 10 | use crate::Result; 11 | use crate::SelectQueryBuilder; 12 | use futures::future::BoxFuture; 13 | 14 | /// A struct that is `Insert` is expected to have same fields as the model, excluding fields 15 | /// that have sane defaults at the database level. Concretely, if you have a Person struct: 16 | /// #[derive(ormlite::Model)] 17 | /// struct Person { 18 | /// id: i32, 19 | /// name: String, 20 | /// age: i32, 21 | /// } 22 | /// 23 | /// Then the `Insert` struct looks like: 24 | /// struct InsertPerson { 25 | /// name: String, 26 | /// age: i32, 27 | /// } 28 | pub trait Insert 29 | where 30 | Self: Sized + Send + Sync, 31 | DB: sqlx::Database, 32 | { 33 | type Model; 34 | fn insert<'e, A>(self, conn: A) -> BoxFuture<'e, Result> 35 | where 36 | A: 'e + Send + sqlx::Acquire<'e, Database = DB>; 37 | } 38 | 39 | /// A struct that implements `ModelBuilder` implements the builder pattern for a model. 40 | pub trait ModelBuilder<'a, DB> 41 | where 42 | Self: Sized + Send + Sync, 43 | DB: sqlx::Database, 44 | { 45 | type Model; 46 | 47 | fn insert<'e: 'a, E>(self, db: E) -> BoxFuture<'a, Result> 48 | where 49 | E: 'e + sqlx::Executor<'e, Database = DB>; 50 | 51 | fn update<'e: 'a, E>(self, db: E) -> BoxFuture<'a, Result> 52 | where 53 | E: 'e + sqlx::Executor<'e, Database = DB>; 54 | 55 | /// All fields that will be modified in the query. 56 | fn modified_fields(&self) -> Vec<&'static str>; 57 | 58 | /// Build the model, don't insert or update it. 59 | fn build(self) -> Self::Model; 60 | } 61 | 62 | /// The core trait. a struct that implements `Model` can also implement `HasModelBuilder`, (and is required to implement `Insertable`) 63 | pub trait Model 64 | where 65 | DB: sqlx::Database, 66 | Self: Sized + TableMeta, 67 | { 68 | type ModelBuilder<'a>: ModelBuilder<'a, DB> 69 | where 70 | Self: 'a; 71 | 72 | /// Insert the model into the database. 73 | fn insert<'a, A>(self, conn: A) -> crate::insert::Insertion<'a, A, Self, DB> 74 | where 75 | A: 'a + Send + sqlx::Acquire<'a, Database = DB>, 76 | Self: Send; 77 | 78 | fn insert_many<'e, E>(values: Vec, db: E) -> BoxFuture<'e, Result>> 79 | where 80 | E: 'e + sqlx::Executor<'e, Database = DB>; 81 | 82 | /// `Model` objects can't track what fields are updated, so this method will update all fields. 83 | /// If you want to update only some fields, use `update_partial` instead. 
84 | fn update_all_fields<'e, E>(self, db: E) -> BoxFuture<'e, Result> 85 | where 86 | E: 'e + Send + sqlx::Executor<'e, Database = DB>; 87 | 88 | fn delete<'e, E>(self, db: E) -> BoxFuture<'e, Result<()>> 89 | where 90 | E: 'e + sqlx::Executor<'e, Database = DB>; 91 | 92 | /// Get by primary key. 93 | fn fetch_one<'e, 'a, Arg, E>(id: Arg, db: E) -> BoxFuture<'e, Result> 94 | where 95 | 'a: 'e, 96 | E: 'e + sqlx::Executor<'e, Database = DB>, 97 | Arg: 'a + Send + sqlx::Encode<'a, DB> + sqlx::Type; 98 | 99 | /// If query building isn't meeting your needs, use this method to query the table using raw SQL. 100 | fn query(query: &str) -> sqlx::query::QueryAs>; 101 | 102 | /// Create a `SelectQueryBuilder` to build a query. 103 | fn select<'args>() -> SelectQueryBuilder<'args, DB, Self>; 104 | 105 | /// Create a builder-pattern object to update one or more columns. 106 | /// You can also use `update_all_fields` to update all columns. 107 | fn update_partial(&self) -> Self::ModelBuilder<'_>; 108 | 109 | fn builder() -> Self::ModelBuilder<'static>; 110 | } 111 | 112 | pub trait TableMeta { 113 | fn table_name() -> &'static str; 114 | fn table_columns() -> &'static [&'static str]; 115 | fn primary_key() -> Option<&'static str>; 116 | fn primary_key_placeholder_idx() -> Option { 117 | let col = Self::primary_key()?; 118 | Self::table_columns().iter().position(|&c| c == col).map(|i| i + 1) 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /core/src/query_builder/args.rs: -------------------------------------------------------------------------------- 1 | use core::default::Default; 2 | use sqlx::{Arguments, Database, IntoArguments}; 3 | 4 | pub struct QueryBuilderArgs<'q, DB: Database>(pub Box>, usize); 5 | 6 | impl<'q, DB: Database> QueryBuilderArgs<'q, DB> { 7 | pub fn add + sqlx::Type>(&mut self, arg: T) { 8 | self.0.add(arg).unwrap(); 9 | self.1 += 1; 10 | } 11 | 12 | pub fn len(&self) -> usize { 13 | self.1 14 | } 15 | } 16 | 17 | impl<'q, DB: Database> IntoArguments<'q, DB> for QueryBuilderArgs<'q, DB> { 18 | fn into_arguments(self) -> DB::Arguments<'q> { 19 | *self.0 20 | } 21 | } 22 | 23 | impl<'q, DB: Database> Default for QueryBuilderArgs<'q, DB> { 24 | fn default() -> Self { 25 | Self(Box::default(), 0) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /core/src/query_builder/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod args; 2 | pub mod placeholder; 3 | mod select; 4 | mod util; 5 | pub use args::QueryBuilderArgs; 6 | pub use placeholder::Placeholder; 7 | pub use select::SelectQueryBuilder; 8 | -------------------------------------------------------------------------------- /core/src/query_builder/placeholder.rs: -------------------------------------------------------------------------------- 1 | pub enum Placeholder { 2 | DollarSign(usize), 3 | QuestionMark, 4 | } 5 | 6 | impl Placeholder { 7 | pub fn dollar_sign() -> Self { 8 | Placeholder::DollarSign(1) 9 | } 10 | 11 | pub fn question_mark() -> Self { 12 | Placeholder::QuestionMark 13 | } 14 | } 15 | 16 | impl Iterator for Placeholder { 17 | type Item = String; 18 | 19 | fn next(&mut self) -> Option { 20 | match *self { 21 | Placeholder::DollarSign(ref mut i) => { 22 | let r = Some(format!("${i}")); 23 | *i += 1; 24 | r 25 | } 26 | Placeholder::QuestionMark => Some("?".to_string()), 27 | } 28 | } 29 | } 30 | 
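Aside — a usage sketch tying together the `Model`/`ModelBuilder` traits documented in core/src/model.rs above, the `Insertion::on_conflict` hook from core/src/insert.rs, and the `SelectQueryBuilder` defined in core/src/query_builder/select.rs below. The `User` model, its fields, and the generated `.name(...)` setter are assumptions for illustration (derive attributes elided); the trait and builder method names are the ones that appear in this crate.

    // Hypothetical model: assume it derives ormlite::Model elsewhere.
    struct User {
        id: i32,
        name: String,
        email: String,
    }

    use ormlite_core::insert::OnConflict;
    use ormlite_core::model::{Model, ModelBuilder};

    async fn demo(conn: &mut sqlx::PgConnection, user: User) -> ormlite_core::Result<()> {
        // Insert, ignoring a conflicting row (Insertion::on_conflict).
        let user = user.insert(&mut *conn).on_conflict(OnConflict::Ignore).await?;

        // Update only the fields set on the builder; `.name(...)` is a generated
        // setter and is assumed here for illustration.
        let user = user.update_partial().name("John").update(&mut *conn).await?;

        // Build a SELECT; `?` placeholders are rewritten to `$1`, `$2`, ... for
        // Postgres before execution (see query_builder/util.rs below).
        let recent: Vec<User> = User::select()
            .where_bind("email = ?", user.email.clone())
            .order_desc("id")
            .limit(10)
            .fetch_all(&mut *conn)
            .await?;
        assert!(recent.len() <= 10);
        Ok(())
    }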
-------------------------------------------------------------------------------- /core/src/query_builder/select.rs: -------------------------------------------------------------------------------- 1 | use crate::error::{Error, Result}; 2 | use crate::model::Model; 3 | use crate::query_builder::args::QueryBuilderArgs; 4 | use crate::query_builder::{util, Placeholder}; 5 | use sqlmo::{Expr, ToSql}; 6 | 7 | use crate::join::{criteria, select_columns, JoinDescription}; 8 | use sqlmo::{query::Where, Select}; 9 | use sqlx::{Executor, IntoArguments}; 10 | use std::marker::PhantomData; 11 | 12 | pub use sqlmo::query::Direction; 13 | 14 | // Add additional information to the sqlx::Database 15 | pub trait DatabaseMetadata { 16 | fn dialect() -> sqlmo::Dialect; 17 | fn placeholder() -> Placeholder; 18 | } 19 | 20 | #[cfg(feature = "postgres")] 21 | impl DatabaseMetadata for sqlx::postgres::Postgres { 22 | fn dialect() -> sqlmo::Dialect { 23 | sqlmo::Dialect::Postgres 24 | } 25 | 26 | fn placeholder() -> Placeholder { 27 | Placeholder::dollar_sign() 28 | } 29 | } 30 | 31 | #[cfg(feature = "sqlite")] 32 | impl DatabaseMetadata for sqlx::sqlite::Sqlite { 33 | fn dialect() -> sqlmo::Dialect { 34 | sqlmo::Dialect::Sqlite 35 | } 36 | 37 | fn placeholder() -> Placeholder { 38 | Placeholder::question_mark() 39 | } 40 | } 41 | 42 | pub struct SelectQueryBuilder<'args, DB, Model> 43 | where 44 | DB: sqlx::Database, 45 | { 46 | pub query: Select, 47 | arguments: QueryBuilderArgs<'args, DB>, 48 | model: PhantomData, 49 | gen: Placeholder, 50 | } 51 | 52 | impl<'args, DB, M> SelectQueryBuilder<'args, DB, M> 53 | where 54 | M: Sized + Send + Sync + Unpin + for<'r> sqlx::FromRow<'r, DB::Row> + 'static + Model, 55 | DB: sqlx::Database + DatabaseMetadata, 56 | DB::Arguments<'args>: IntoArguments<'args, DB>, 57 | { 58 | pub async fn fetch_all<'executor, E>(self, db: E) -> Result> 59 | where 60 | E: Executor<'executor, Database = DB>, 61 | { 62 | let (text, args) = self.into_query_and_args()?; 63 | let z: &str = &text; 64 | util::query_as_with_recast_lifetime::(z, args) 65 | .fetch_all(db) 66 | .await 67 | .map_err(Error::from) 68 | } 69 | 70 | pub async fn fetch_one<'executor, E>(self, db: E) -> Result 71 | where 72 | E: Executor<'executor, Database = DB>, 73 | { 74 | let (text, args) = self.into_query_and_args()?; 75 | let z: &str = &text; 76 | util::query_as_with_recast_lifetime::(z, args) 77 | .fetch_one(db) 78 | .await 79 | .map_err(Error::from) 80 | } 81 | 82 | pub async fn fetch_optional<'executor, E>(self, db: E) -> Result> 83 | where 84 | E: Executor<'executor, Database = DB>, 85 | { 86 | let (text, args) = self.into_query_and_args()?; 87 | let z: &str = &text; 88 | util::query_as_with_recast_lifetime::(z, args) 89 | .fetch_optional(db) 90 | .await 91 | .map_err(Error::from) 92 | } 93 | 94 | pub fn with(mut self, name: &str, query: &str) -> Self { 95 | self.query = self.query.with_raw(name, query); 96 | self 97 | } 98 | 99 | /// Add a column to the query. Note you typically don't need this, as creating a query from 100 | /// `Model::select` will automatically add that model's columns. 101 | /// 102 | /// # Arguments 103 | /// * `column` - The column to add. Examples: "id", "name", "person.*" 104 | pub fn select(mut self, column: impl Into) -> Self { 105 | self.query = self.query.select_raw(column.into()); 106 | self 107 | } 108 | 109 | /// Add a WHERE clause to the query. 110 | /// Do not use format! to add parameters. Instead, use `?` as the placeholder, and add 111 | /// parameters with [`bind`](Self::bind). 
112 | /// 113 | /// Postgres users: You can (and should) use `?` as the placeholder. You might not have a defined 114 | /// numerical order for your parameters, preventing $ syntax. Upon execution, the query 115 | /// builder replaces `?` with `$`. If you need the same parameter multiple times, you should 116 | /// bind it multiple times. Arguments aren't moved, so this doesn't incur a memory cost. If you 117 | /// still want to re-use parameters, you can use $ placeholders. However, don't mix `?` and 118 | /// `$` placeholders, as they will conflict. 119 | /// 120 | /// # Arguments 121 | /// * `clause` - The clause to add. Examples: "id = ?", "name = ?", "person.id = ?" 122 | pub fn where_(mut self, clause: &'static str) -> Self { 123 | self.query = self.query.where_raw(clause); 124 | self 125 | } 126 | 127 | /// Convenience method to add a `WHERE` and bind a value in one call. 128 | pub fn where_bind(mut self, clause: &'static str, value: T) -> Self 129 | where 130 | T: 'args + Send + sqlx::Type + sqlx::Encode<'args, DB>, 131 | { 132 | self.query = self.query.where_raw(clause); 133 | self.arguments.add(value); 134 | self 135 | } 136 | /// Dangerous because it takes a string that could be user crafted. You should prefer `.where_` which 137 | /// takes a &'static str, and pass arguments with `.bind()`. 138 | pub fn dangerous_where(mut self, clause: &str) -> Self { 139 | self.query = self.query.where_raw(clause); 140 | self 141 | } 142 | 143 | pub fn join(mut self, join_description: JoinDescription) -> Self { 144 | match &join_description { 145 | JoinDescription::ManyToOne { 146 | columns, 147 | foreign_table, 148 | field, 149 | foreign_key, 150 | local_column, 151 | } => { 152 | let join = sqlmo::query::Join { 153 | typ: sqlmo::query::JoinType::Left, 154 | table: sqlmo::query::JoinTable::Table { 155 | schema: None, 156 | table: foreign_table.to_string(), 157 | }, 158 | alias: Some(field.to_string()), 159 | criteria: criteria(M::table_name(), local_column, field, foreign_key), 160 | }; 161 | self.query.join.push(join); 162 | self.query.columns.extend(select_columns(columns, field)) 163 | } 164 | } 165 | self 166 | } 167 | 168 | #[doc(hidden)] 169 | #[deprecated(note = "Please use `where_` instead")] 170 | pub fn filter(self, clause: &'static str) -> Self { 171 | self.where_(clause) 172 | } 173 | 174 | /// Add a HAVING clause to the query. 175 | pub fn having(mut self, clause: &str) -> Self { 176 | self.query = self.query.having(Where::Expr(Expr::Raw(clause.to_string()))); 177 | self 178 | } 179 | 180 | /// Add a GROUP BY clause to the query. 181 | /// 182 | /// # Arguments: 183 | /// * `clause`: The GROUP BY clause to add. Examples: "id", "id, date", "1, 2, ROLLUP(3)" 184 | pub fn group_by(mut self, clause: &str) -> Self { 185 | self.query = self.query.group_by(clause); 186 | self 187 | } 188 | 189 | /// Add an ORDER BY clause to the query. 190 | /// 191 | /// # Arguments: 192 | /// * `clause`: The ORDER BY clause to add. "created_at DESC", "id ASC NULLS FIRST" 193 | /// * `direction`: Direction::Asc or Direction::Desc 194 | pub fn order_by(mut self, clause: &str, direction: Direction) -> Self { 195 | self.query = self.query.order_by(clause, direction); 196 | self 197 | } 198 | 199 | pub fn order_asc(mut self, clause: &str) -> Self { 200 | self.query = self.query.order_asc(clause); 201 | self 202 | } 203 | 204 | pub fn order_desc(mut self, clause: &str) -> Self { 205 | self.query = self.query.order_desc(clause); 206 | self 207 | } 208 | 209 | /// Add a limit to the query. 
210 | pub fn limit(mut self, limit: usize) -> Self { 211 | self.query = self.query.limit(limit); 212 | self 213 | } 214 | 215 | /// Add an offset to the query. 216 | pub fn offset(mut self, offset: usize) -> Self { 217 | self.query = self.query.offset(offset); 218 | self 219 | } 220 | 221 | /// Bind an argument to the query. 222 | pub fn bind(mut self, value: T) -> Self 223 | where 224 | T: 'args + Send + sqlx::Type + sqlx::Encode<'args, DB>, 225 | { 226 | self.arguments.add(value); 227 | self 228 | } 229 | 230 | pub fn into_query_and_args(mut self) -> Result<(String, QueryBuilderArgs<'args, DB>)> { 231 | let q = self.query.to_sql(DB::dialect()); 232 | let args = self.arguments; 233 | let (q, placeholder_count) = util::replace_placeholders(&q, &mut self.gen)?; 234 | if placeholder_count != args.len() { 235 | return Err(Error::OrmliteError(format!( 236 | "Failing to build query. {} placeholders were found in the query, but \ 237 | {} arguments were provided.", 238 | placeholder_count, 239 | args.len(), 240 | ))); 241 | } 242 | Ok((q, args)) 243 | } 244 | } 245 | 246 | impl<'args, DB: sqlx::Database + DatabaseMetadata, M: Model> Default for SelectQueryBuilder<'args, DB, M> { 247 | fn default() -> Self { 248 | Self { 249 | query: Select::default().from(M::table_name()), 250 | arguments: QueryBuilderArgs::default(), 251 | model: PhantomData, 252 | gen: DB::placeholder(), 253 | } 254 | } 255 | } 256 | -------------------------------------------------------------------------------- /core/src/query_builder/util.rs: -------------------------------------------------------------------------------- 1 | use crate::query_builder::args::QueryBuilderArgs; 2 | use crate::{Error, Result}; 3 | use sqlparser::dialect::GenericDialect; 4 | use sqlparser::tokenizer::{Token, Tokenizer}; 5 | use sqlx::query::QueryAs; 6 | 7 | pub fn replace_placeholders>( 8 | sql: &str, 9 | placeholder_generator: &mut T, 10 | ) -> Result<(String, usize)> { 11 | let mut placeholder_count = 0usize; 12 | let dialect = GenericDialect {}; 13 | // note this lib is inefficient because it's copying strings everywhere, instead 14 | // of using slices and an appropriate lifetime. probably want to swap out the lib at some point 15 | let tokens = Tokenizer::new(&dialect, sql).tokenize()?; 16 | // 16 is arbitrary here. 17 | let mut buf = String::with_capacity(sql.len() + 16); 18 | let mut it = tokens.iter(); 19 | while let Some(tok) = it.next() { 20 | match tok { 21 | Token::Placeholder(_) => { 22 | buf.push_str(&placeholder_generator.next().unwrap()); 23 | placeholder_count += 1; 24 | } 25 | Token::Char(c) => { 26 | match c { 27 | '?' => { 28 | buf.push_str(&placeholder_generator.next().unwrap()); 29 | placeholder_count += 1; 30 | } 31 | '$' => { 32 | let next_tok = it.next(); 33 | if let Some(next_tok) = next_tok { 34 | match next_tok { 35 | Token::Number(text, _) => { 36 | let n = text.parse::().map_err(|_| Error::OrmliteError( 37 | format!("Failed to parse number after a $ during query tokenization. 
Value was: {text}" 38 | )))?; 39 | buf.push_str(&format!("${next_tok}")); 40 | placeholder_count = std::cmp::max(placeholder_count, n); 41 | } 42 | _ => {} 43 | } 44 | } 45 | } 46 | _ => buf.push(*c), 47 | } 48 | } 49 | _ => buf.push_str(&tok.to_string()), 50 | } 51 | } 52 | Ok((buf, placeholder_count)) 53 | } 54 | 55 | pub(super) fn query_as_with_recast_lifetime<'q, 'r, DB, Model>( 56 | s: &'q str, 57 | args: QueryBuilderArgs<'r, DB>, 58 | ) -> QueryAs<'q, DB, Model, QueryBuilderArgs<'q, DB>> 59 | where 60 | 'r: 'q, 61 | DB: sqlx::Database, 62 | Model: for<'s> sqlx::FromRow<'s, DB::Row>, 63 | { 64 | // unsafe is safe b/c 'r: 'q. Rust isn't smart enough to know that downcasting of traits is safe, because when traits get lifetimes, it doesn't 65 | // know if the lifetime is covariant or contravariant, so it enforces equivalence. See: https://www.reddit.com/r/rust/comments/rox4j9/lifetime_inference_fails_when_lifetime_is_part_of/ 66 | // But we know the trait is implemented by a struct, not a function, so we can do the downcast safely. Yay! 67 | let recast_args = unsafe { std::mem::transmute::<_, QueryBuilderArgs<'q, DB>>(args) }; 68 | sqlx::query_as_with(s, recast_args) 69 | } 70 | 71 | #[cfg(test)] 72 | mod tests { 73 | use super::*; 74 | 75 | use crate::Result; 76 | 77 | #[test] 78 | fn test_replace_placeholders() -> Result<()> { 79 | let mut placeholder_generator = vec!["$1", "$2", "$3"].into_iter().map(|s| s.to_string()); 80 | let (sql, placeholder_count) = replace_placeholders( 81 | "SELECT * FROM users WHERE id = ? OR id = ? OR id = ?", 82 | &mut placeholder_generator, 83 | )?; 84 | assert_eq!(sql, "SELECT * FROM users WHERE id = $1 OR id = $2 OR id = $3"); 85 | assert_eq!(placeholder_count, 3); 86 | Ok(()) 87 | } 88 | 89 | #[test] 90 | fn test_leave_placeholders_alone() -> Result<()> { 91 | let mut placeholder_generator = vec!["$1", "$2", "$3"].into_iter().map(|s| s.to_string()); 92 | let (sql, placeholder_count) = 93 | replace_placeholders("SELECT * FROM users WHERE email = $1", &mut placeholder_generator)?; 94 | assert_eq!(sql, "SELECT * FROM users WHERE email = $1"); 95 | assert_eq!(placeholder_count, 1); 96 | Ok(()) 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /core/src/schema.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::path::Path; 3 | use ormlite_attr::{schema_from_filepaths, ColumnMeta, Ident, InnerType}; 4 | use ormlite_attr::ModelMeta; 5 | use ormlite_attr::Type; 6 | use sqlmo::{schema::Column, Constraint, Schema, Table}; 7 | use anyhow::Result as AnyResult; 8 | use crate::config::Config; 9 | 10 | pub fn schema_from_ormlite_project(paths: &[&Path], c: &Config) -> AnyResult { 11 | let mut schema = Schema::default(); 12 | let mut fs_schema = schema_from_filepaths(paths)?; 13 | let primary_key_type: HashMap = fs_schema 14 | .tables 15 | .iter() 16 | .map(|t| { 17 | let pkey_ty = t.pkey.ty.inner_type().clone(); 18 | (t.ident.to_string(), pkey_ty) 19 | }) 20 | .collect(); 21 | for t in &mut fs_schema.tables { 22 | for c in &mut t.table.columns { 23 | // replace alias types with the real type. 24 | let inner = c.ty.inner_type_mut(); 25 | if let Some(f) = fs_schema.type_reprs.get(&inner.ident.to_string()) { 26 | inner.ident = Ident::from(f); 27 | } 28 | // replace join types with the primary key type. 
29 | if c.ty.is_join() { 30 | let model_name = c.ty.inner_type_name(); 31 | let pkey = primary_key_type 32 | .get(&model_name) 33 | .expect(&format!("Could not find model {} for join", model_name)); 34 | c.ty = Type::Inner(pkey.clone()); 35 | } 36 | } 37 | } 38 | for table in fs_schema.tables { 39 | let table = Table::from_meta(&table); 40 | schema.tables.push(table); 41 | } 42 | let mut table_names: HashMap<String, (String, String)> = 43 | schema.tables.iter().map(|t| (t.name.clone(), (t.name.clone(), t.primary_key().unwrap().name.clone()))).collect(); 44 | for (alias, real) in &c.table.aliases { 45 | let Some(real) = table_names.get(real) else { 46 | continue; 47 | }; 48 | table_names.insert(alias.clone(), real.clone()); 49 | } 50 | for table in &mut schema.tables { 51 | for column in &mut table.columns { 52 | if column.primary_key { 53 | continue; 54 | } 55 | if column.name.ends_with("_id") || column.name.ends_with("_uuid") { 56 | let Some((model_name, _)) = column.name.rsplit_once('_') else { 57 | continue; 58 | }; 59 | if let Some((t, pkey)) = table_names.get(model_name) { 60 | let constraint = Constraint::foreign_key(t.to_string(), vec![pkey.clone()]); 61 | column.constraint = Some(constraint); 62 | } 63 | } 64 | } 65 | } 66 | Ok(schema) 67 | } 68 | 69 | #[derive(Debug)] 70 | pub struct Options { 71 | pub verbose: bool, 72 | } 73 | 74 | pub trait FromMeta: Sized { 75 | type Input; 76 | fn from_meta(meta: &Self::Input) -> Self; 77 | } 78 | 79 | impl FromMeta for Table { 80 | type Input = ModelMeta; 81 | fn from_meta(model: &ModelMeta) -> Self { 82 | let columns = model 83 | .columns 84 | .iter() 85 | .flat_map(|c| { 86 | if c.skip { 87 | return None; 88 | } 89 | let mut col = Option::<Column>::from_meta(c)?; 90 | col.primary_key = model.pkey.name == col.name; 91 | Some(col) 92 | }) 93 | .collect(); 94 | Self { 95 | schema: None, 96 | name: model.name.clone(), 97 | columns, 98 | indexes: vec![], 99 | } 100 | } 101 | } 102 | 103 | impl FromMeta for Option<Column> { 104 | type Input = ColumnMeta; 105 | fn from_meta(meta: &Self::Input) -> Self { 106 | let mut ty = Nullable::from_type(&meta.ty)?; 107 | if meta.json { 108 | ty.ty = sqlmo::Type::Jsonb; 109 | } 110 | Some(Column { 111 | name: meta.name.clone(), 112 | typ: ty.ty, 113 | default: None, 114 | nullable: ty.nullable, 115 | primary_key: meta.marked_primary_key, 116 | constraint: None, 117 | }) 118 | } 119 | } 120 | 121 | struct Nullable { 122 | pub ty: sqlmo::Type, 123 | pub nullable: bool, 124 | } 125 | 126 | impl From<sqlmo::Type> for Nullable { 127 | fn from(value: sqlmo::Type) -> Self { 128 | Self { 129 | ty: value, 130 | nullable: false, 131 | } 132 | } 133 | } 134 | 135 | impl Nullable { 136 | fn from_type(ty: &Type) -> Option<Self> { 137 | use sqlmo::Type::*; 138 | match ty { 139 | Type::Vec(v) => { 140 | if let Type::Inner(p) = v.as_ref() { 141 | if p.ident == "u8" { 142 | return Some(Nullable { 143 | ty: Bytes, 144 | nullable: false, 145 | }); 146 | } 147 | } 148 | let v = Self::from_type(v.as_ref())?; 149 | Some(Nullable { 150 | ty: Array(Box::new(v.ty)), 151 | nullable: false, 152 | }) 153 | } 154 | Type::Inner(p) => { 155 | let ident = p.ident.to_string(); 156 | let ty = match ident.as_str() { 157 | // signed 158 | "i8" => I16, 159 | "i16" => I16, 160 | "i32" => I32, 161 | "i64" => I64, 162 | "i128" => Decimal, 163 | "isize" => I64, 164 | // unsigned 165 | "u8" => I16, 166 | "u16" => I32, 167 | "u32" => I64, 168 | // Turns out postgres doesn't support u64.
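// so u64, u128, and usize are widened to NUMERIC (Decimal) below rather than mapped to a native integer type.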
169 | "u64" => Decimal, 170 | "u128" => Decimal, 171 | "usize" => Decimal, 172 | // float 173 | "f32" => F32, 174 | "f64" => F64, 175 | // bool 176 | "bool" => Boolean, 177 | // string 178 | "String" => Text, 179 | "str" => Text, 180 | // date 181 | "DateTime" => DateTime, 182 | "NaiveDate" => Date, 183 | "NaiveTime" => DateTime, 184 | "NaiveDateTime" => DateTime, 185 | // decimal 186 | "Decimal" => Decimal, 187 | // uuid 188 | "Uuid" => Uuid, 189 | // json 190 | "Json" => Jsonb, 191 | z => Other(z.to_string()), 192 | }; 193 | Some(Nullable { ty, nullable: false }) 194 | } 195 | Type::Option(o) => { 196 | let inner = Self::from_type(o)?; 197 | Some(Nullable { 198 | ty: inner.ty, 199 | nullable: true, 200 | }) 201 | } 202 | Type::Join(_) => None, 203 | } 204 | } 205 | } 206 | 207 | #[cfg(test)] 208 | mod tests { 209 | use super::*; 210 | use assert_matches::assert_matches; 211 | use ormlite_attr::Type; 212 | use syn::parse_str; 213 | use anyhow::Result; 214 | 215 | #[test] 216 | fn test_convert_type() -> Result<()> { 217 | use sqlmo::Type as SqlType; 218 | let s = Type::from(&parse_str::("String").unwrap()); 219 | assert_matches!(Nullable::from_type(&s).unwrap().ty, SqlType::Text); 220 | let s = Type::from(&parse_str::("u32").unwrap()); 221 | assert_matches!(Nullable::from_type(&s).unwrap().ty, SqlType::I64); 222 | let s = Type::from(&parse_str::("Option").unwrap()); 223 | let s = Nullable::from_type(&s).unwrap(); 224 | assert_matches!(s.ty, SqlType::Text); 225 | assert!(s.nullable); 226 | Ok(()) 227 | } 228 | 229 | #[test] 230 | fn test_support_vec() { 231 | use sqlmo::Type as SqlType; 232 | let s = Type::from(&parse_str::("Vec").unwrap()); 233 | let SqlType::Array(inner) = Nullable::from_type(&s).unwrap().ty else { 234 | panic!("Expected array"); 235 | }; 236 | assert_eq!(*inner, SqlType::Uuid); 237 | } 238 | } 239 | -------------------------------------------------------------------------------- /macro/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "ormlite-macro" 3 | version = "0.23.3" 4 | edition = "2021" 5 | description = "An ORM for people who love SQL. Use the `ormlite` crate, not this one." 6 | authors = ["Kurt Wolf "] 7 | license = "MIT" 8 | repository = "https://github.com/kurtbuilds/ormlite" 9 | 10 | [lib] 11 | proc-macro = true 12 | 13 | [features] 14 | postgres = [] 15 | sqlite = [] 16 | mysql = [] 17 | runtime-tokio-rustls = ["sqlx/runtime-tokio-rustls"] 18 | 19 | default-sqlite = ["sqlite"] 20 | default-postgres = ["postgres"] 21 | default-mysql = ["mysql"] 22 | 23 | [dependencies] 24 | syn.workspace = true 25 | quote.workspace = true 26 | proc-macro2.workspace = true 27 | ormlite-core.workspace = true 28 | ormlite-attr.workspace = true 29 | sqlx.workspace = true 30 | itertools.workspace = true 31 | convert_case.workspace = true 32 | -------------------------------------------------------------------------------- /macro/Justfile: -------------------------------------------------------------------------------- 1 | set dotenv-load := false 2 | 3 | help: 4 | @just --list --unsorted 5 | 6 | clean: 7 | cargo clean 8 | 9 | build: 10 | cargo build 11 | alias b := build 12 | 13 | run *args: 14 | cargo run {{args}} 15 | alias r := run 16 | 17 | release: 18 | cargo build --release 19 | 20 | install: 21 | cargo install --path . 
22 | 23 | bootstrap: 24 | cargo install cargo-edit 25 | 26 | test *args: 27 | cargo test {{args}} -F runtime-tokio-rustls,sqlite 28 | 29 | check: 30 | cargo check 31 | alias c := check 32 | 33 | fix: 34 | cargo clippy --fix 35 | 36 | bench: 37 | cargo criterion --features bench 38 | 39 | # Bump version. level=major,minor,patch 40 | version level: 41 | git diff-index --exit-code HEAD > /dev/null || ! echo You have untracked changes. Commit your changes before bumping the version. 42 | cargo set-version --bump {{level}} 43 | cargo update # This bumps Cargo.lock 44 | VERSION=$(rg "version = \"([0-9.]+)\"" -or '$1' Cargo.toml | head -n1) && \ 45 | git commit -am "Bump version {{level}} to $VERSION" && \ 46 | git tag v$VERSION && \ 47 | git push origin v$VERSION 48 | git push 49 | 50 | publish: 51 | cargo publish 52 | 53 | patch: test 54 | just version patch 55 | just publish 56 | 57 | expand n: 58 | @echo $(dye -c INFO) Expandable targets are defined as [[bin]] in Cargo.toml 59 | cargo expand --bin {{n}} 60 | -------------------------------------------------------------------------------- /macro/README.md: -------------------------------------------------------------------------------- 1 | Please see [`ormlite`](https://crates.io/crates/ormlite). 2 | -------------------------------------------------------------------------------- /macro/src/codegen.rs: -------------------------------------------------------------------------------- 1 | pub mod common; 2 | pub mod from_row; 3 | pub mod insert; 4 | pub mod insert_model; 5 | pub mod into_arguments; 6 | pub mod join_description; 7 | pub mod meta; 8 | pub mod model; 9 | pub mod model_builder; 10 | #[cfg(feature = "mysql")] 11 | #[cfg_attr(docsrs, doc(cfg(feature = "mysql")))] 12 | pub mod mysql; 13 | #[cfg(feature = "postgres")] 14 | #[cfg_attr(docsrs, doc(cfg(feature = "postgres")))] 15 | pub mod postgres; 16 | pub mod select; 17 | #[cfg(feature = "sqlite")] 18 | #[cfg_attr(docsrs, doc(cfg(feature = "sqlite")))] 19 | pub mod sqlite; 20 | pub mod update; 21 | -------------------------------------------------------------------------------- /macro/src/codegen/common.rs: -------------------------------------------------------------------------------- 1 | use crate::MetadataCache; 2 | use itertools::Itertools; 3 | use ormlite_attr::ColumnMeta; 4 | use ormlite_attr::Ident; 5 | use ormlite_attr::ModelMeta; 6 | use ormlite_attr::TableMeta; 7 | use ormlite_attr::{InnerType, Type}; 8 | use ormlite_core::query_builder::Placeholder; 9 | use proc_macro2::TokenStream; 10 | use quote::{quote, ToTokens}; 11 | use std::borrow::Cow; 12 | 13 | pub fn generate_conditional_bind(c: &ColumnMeta) -> TokenStream { 14 | let name = &c.ident; 15 | if c.is_join() { 16 | quote! { 17 | if let Some(value) = self.#name { 18 | q = q.bind(value._id()); 19 | } 20 | } 21 | } else if c.json { 22 | if c.is_option() { 23 | quote! { 24 | if let Some(value) = self.#name { 25 | q = q.bind(value.map(::ormlite::types::Json)); 26 | } 27 | } 28 | } else { 29 | quote! { 30 | if let Some(value) = self.#name { 31 | q = q.bind(::ormlite::types::Json(value)); 32 | } 33 | } 34 | } 35 | } else { 36 | quote! 
{ 37 | if let Some(value) = self.#name { 38 | q = q.bind(value); 39 | } 40 | } 41 | } 42 | } 43 | 44 | /// Returns whether the given type is `String`. 45 | fn ty_is_string(ty: &syn::Type) -> bool { 46 | let p = match ty { 47 | syn::Type::Path(p) => p, 48 | _ => return false, 49 | }; 50 | p.path.segments.last().map(|s| s.ident == "String").unwrap_or(false) 51 | } 52 | 53 | fn recursive_primitive_types_ty<'a>(ty: &'a Type, cache: &'a MetadataCache) -> Vec<Cow<'a, InnerType>> { 54 | match ty { 55 | Type::Option(ty) => recursive_primitive_types_ty(ty, cache), 56 | Type::Vec(ty) => { 57 | let inner = recursive_primitive_types_ty(ty, cache); 58 | let inner = inner.into_iter().next().expect("Vec must have inner type"); 59 | let inner: InnerType = inner.into_owned(); 60 | vec![Cow::Owned(InnerType { 61 | path: vec![], 62 | ident: Ident::from("Vec"), 63 | args: Some(Box::new(inner)), 64 | })] 65 | } 66 | Type::Inner(p) => vec![Cow::Borrowed(p)], 67 | Type::Join(j) => { 68 | let joined = cache.get(&j.inner_type_name()).expect("Join type not found"); 69 | recursive_primitive_types(joined, cache) 70 | } 71 | } 72 | } 73 | 74 | fn recursive_primitive_types<'a>(table: &'a ModelMeta, cache: &'a MetadataCache) -> Vec<Cow<'a, InnerType>> { 75 | table 76 | .columns 77 | .iter() 78 | .map(|c| recursive_primitive_types_ty(&c.ty, cache)) 79 | .flatten() 80 | .collect() 81 | } 82 | 83 | pub(crate) fn table_primitive_types<'a>(attr: &'a TableMeta, cache: &'a MetadataCache) -> Vec<Cow<'a, InnerType>> { 84 | attr.columns 85 | .iter() 86 | .filter(|c| !c.skip) 87 | .filter(|c| !c.json) 88 | .map(|c| recursive_primitive_types_ty(&c.ty, cache)) 89 | .flatten() 90 | .unique() 91 | .collect() 92 | } 93 | 94 | pub fn from_row_bounds<'a>( 95 | db: &dyn OrmliteCodegen, 96 | attr: &'a TableMeta, 97 | cache: &'a MetadataCache, 98 | ) -> impl Iterator<Item = TokenStream> + 'a { 99 | let database = db.database_ts(); 100 | table_primitive_types(attr, cache).into_iter().map(move |ty| { 101 | quote! { 102 | #ty: ::ormlite::decode::Decode<'a, #database>, 103 | #ty: ::ormlite::types::Type<#database>, 104 | } 105 | }) 106 | } 107 | 108 | fn is_vec(p: &syn::Path) -> bool { 109 | let Some(segment) = p.segments.last() else { 110 | return false; 111 | }; 112 | segment.ident == "Vec" 113 | } 114 | 115 | /// Used to bind fields to the query upon insertion, update, etc. 116 | /// Assumed Bindings: 117 | /// - `model`: model struct 118 | /// - `q`: sqlx query 119 | pub fn insertion_binding(c: &ColumnMeta) -> TokenStream { 120 | let name = &c.ident; 121 | if c.is_join() { 122 | quote! { 123 | q = q.bind(#name._id()); 124 | } 125 | } else if c.json { 126 | if c.is_option() { 127 | quote! { 128 | q = q.bind(model.#name.map(::ormlite::types::Json)); 129 | } 130 | } else { 131 | quote! { 132 | q = q.bind(::ormlite::types::Json(model.#name)); 133 | } 134 | } 135 | } else { 136 | quote! { 137 | q = q.bind(model.#name); 138 | } 139 | } 140 | } 141 | 142 | pub trait OrmliteCodegen { 143 | fn dialect_ts(&self) -> TokenStream; 144 | fn database_ts(&self) -> TokenStream; 145 | fn placeholder_ts(&self) -> TokenStream; 146 | // A placeholder that works at the phase when it's invoked (e.g. at compile time, it can be used). 147 | // Compare to placeholder_ts, which is just the tokens of a placeholder, and therefore can't be "used" until runtime.
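// For example, with the Postgres backend, placeholder() yields "$1", "$2", ... while the macro runs, whereas placeholder_ts() merely expands to ::ormlite::query_builder::Placeholder::dollar_sign() in the generated code.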
148 | fn placeholder(&self) -> Placeholder; 149 | fn row(&self) -> TokenStream; 150 | } 151 | -------------------------------------------------------------------------------- /macro/src/codegen/from_row.rs: -------------------------------------------------------------------------------- 1 | use crate::codegen::common::{from_row_bounds, OrmliteCodegen}; 2 | use crate::MetadataCache; 3 | use ormlite_attr::Ident; 4 | use ormlite_attr::TableMeta; 5 | use ormlite_attr::{ColumnMeta, Type}; 6 | use proc_macro2::TokenStream; 7 | use quote::quote; 8 | 9 | pub fn impl_FromRow(db: &dyn OrmliteCodegen, attr: &TableMeta, cache: &MetadataCache) -> TokenStream { 10 | let bounds = from_row_bounds(db, attr, cache); 11 | let row = db.row(); 12 | 13 | let prefix_branches = attr.columns.iter().filter(|&c| c.is_join_one()).map(|c| { 14 | let name = &c.ident.to_string(); 15 | let iden = &c.ident; 16 | let meta = cache 17 | .get(c.joined_struct_name().unwrap().as_str()) 18 | .expect("Joined struct not found"); 19 | let prefixed_columns = meta.database_columns().map(|c| format!("__{}__{}", iden, c.ident)); 20 | let path = c.joined_model(); 21 | let result = quote! { 22 | #path::from_row_using_aliases(row, &[ 23 | #( 24 | #prefixed_columns, 25 | )* 26 | ])? 27 | }; 28 | quote! { 29 | #name => { 30 | model.#iden = ::ormlite::model::Join::_query_result(#result); 31 | } 32 | } 33 | }); 34 | 35 | let field_names = attr.database_columns().map(|c| &c.name); 36 | 37 | let map_join = if attr.columns.iter().any(|c| c.is_join()) { 38 | quote! { 39 | let mut prefixes = ::ormlite::Row::columns(row).iter().filter_map(|c| { 40 | let name = ::ormlite::Column::name(c); 41 | if name.starts_with("__") { 42 | name.rsplitn(2, "__").last().map(|s| &s[2..]) 43 | } else { 44 | None 45 | } 46 | }) 47 | .collect::>(); 48 | prefixes.sort(); 49 | prefixes.dedup(); 50 | for prefix in prefixes { 51 | match prefix { 52 | #( 53 | #prefix_branches 54 | )* 55 | _ => { 56 | return Err(::ormlite::SqlxError::Decode( 57 | Box::new(::ormlite::Error::OrmliteError(format!("Unknown column prefix: {}", prefix))), 58 | )); 59 | } 60 | } 61 | } 62 | } 63 | } else { 64 | TokenStream::new() 65 | }; 66 | let model = &attr.ident; 67 | quote! { 68 | impl<'a> ::ormlite::model::FromRow<'a, #row> for #model 69 | where 70 | #( 71 | #bounds 72 | )* 73 | { 74 | fn from_row(row: &'a #row) -> ::std::result::Result { 75 | #[allow(unused_mut)] 76 | let mut model = Self::from_row_using_aliases(row, &[ 77 | #( 78 | #field_names, 79 | )* 80 | ])?; 81 | #map_join 82 | Ok(model) 83 | } 84 | } 85 | } 86 | } 87 | 88 | pub fn impl_from_row_using_aliases( 89 | db: &dyn OrmliteCodegen, 90 | attr: &TableMeta, 91 | metadata_cache: &MetadataCache, 92 | ) -> TokenStream { 93 | let row = db.row(); 94 | let fields = attr.all_fields(); 95 | let bounds = from_row_bounds(db, attr, &metadata_cache); 96 | let mut incrementer = 0usize..; 97 | let columns = attr 98 | .columns 99 | .iter() 100 | .map(|c| { 101 | if c.skip { 102 | let id = &c.ident; 103 | return quote! { 104 | let #id = Default::default(); 105 | }; 106 | } 107 | let index = incrementer.next().unwrap(); 108 | let get = quote! { aliases[#index] }; 109 | from_row_for_column(get, c) 110 | }) 111 | .collect::>(); 112 | 113 | let model = &attr.ident; 114 | quote! 
{ 115 | impl #model { 116 | pub fn from_row_using_aliases<'a>(row: &'a #row, aliases: &'a [&str]) -> ::std::result::Result 117 | where 118 | #( 119 | #bounds 120 | )* 121 | { 122 | #( 123 | #columns 124 | )* 125 | Ok(Self { #(#fields,)* }) 126 | } 127 | } 128 | } 129 | } 130 | 131 | /// `name` renames the column. Can pass `col.column_name` if it's not renamed. 132 | pub fn from_row_for_column(get_value: TokenStream, col: &ColumnMeta) -> TokenStream { 133 | let id = &col.ident; 134 | let ty = &col.ty; 135 | if col.is_join() { 136 | let id_id = Ident::from(format!("{}_id", id)); 137 | quote! { 138 | let #id_id: <#ty as ::ormlite::model::JoinMeta>::IdType = ::ormlite::Row::try_get(row, #get_value)?; 139 | let #id = ::ormlite::model::Join::new_with_id(#id_id); 140 | } 141 | } else if col.json { 142 | if let Type::Option(inner) = ty { 143 | quote! { 144 | let #id: Option<::ormlite::types::Json<#inner>> = ::ormlite::Row::try_get(row, #get_value)?; 145 | let #id = #id.map(|j| j.0); 146 | } 147 | } else { 148 | quote! { 149 | let #id: ::ormlite::types::Json<#ty> = ::ormlite::Row::try_get(row, #get_value)?; 150 | let #id = #id.0; 151 | } 152 | } 153 | } else { 154 | quote! { 155 | let #id: #ty = ::ormlite::Row::try_get(row, #get_value)?; 156 | } 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /macro/src/codegen/insert.rs: -------------------------------------------------------------------------------- 1 | use itertools::Itertools; 2 | use crate::codegen::common::{generate_conditional_bind, insertion_binding, OrmliteCodegen}; 3 | use crate::MetadataCache; 4 | use ormlite_attr::ColumnMeta; 5 | use ormlite_attr::Ident; 6 | use ormlite_attr::ModelMeta; 7 | use ormlite_attr::TableMeta; 8 | use ormlite_attr::Type; 9 | use proc_macro2::TokenStream; 10 | use quote::quote; 11 | 12 | pub fn impl_Model__insert(db: &dyn OrmliteCodegen, attr: &ModelMeta, metadata_cache: &MetadataCache) -> TokenStream { 13 | let box_future = crate::util::box_fut_ts(); 14 | let mut placeholder = db.placeholder(); 15 | let db = db.database_ts(); 16 | let table = &attr.name; 17 | let params = attr.database_columns().map(|c| { 18 | if attr.pkey.name == c.name { 19 | placeholder.next().unwrap() 20 | } else if c.has_database_default { 21 | "DEFAULT".to_string() 22 | } else { 23 | placeholder.next().unwrap() 24 | } 25 | }); 26 | 27 | let query_bindings = attr 28 | .database_columns() 29 | .filter(|c| attr.pkey.name == c.name || !c.has_database_default) 30 | .map(|c| insertion_binding(c)); 31 | 32 | let insert_join = attr.many_to_one_joins().map(|c| insert_join(c)); 33 | 34 | let late_bind = attr.many_to_one_joins().map(|c| { 35 | let id = &c.ident; 36 | quote! { 37 | model.#id = #id; 38 | } 39 | }); 40 | 41 | quote! 
{ 42 | #[allow(unused_mut)] 43 | fn insert<'a, A>(mut self, conn: A) -> ::ormlite::__private::Insertion<'a, A, Self, #db> 44 | where 45 | A: 'a + Send + ::ormlite::Acquire<'a, Database=#db> 46 | { 47 | ::ormlite::__private::Insertion { 48 | acquire: conn, 49 | model: self, 50 | closure: Box::new(|conn, mut model, query| { 51 | Box::pin(async move { 52 | let mut conn = conn.acquire().await?; 53 | #( 54 | #insert_join 55 | )* 56 | let mut q = ::ormlite::query_as(&query); 57 | #( 58 | #query_bindings 59 | )* 60 | let mut model: Self = q.fetch_one(&mut *conn).await?; 61 | #( 62 | #late_bind 63 | )* 64 | Ok(model) 65 | }) 66 | }), 67 | insert: ::ormlite::__private::Insert::new(#table) 68 | .columns(::table_columns()) 69 | .values(::ormlite::__private::Values::from([#(#params,)*].as_slice())) 70 | .returning(::table_columns()), 71 | _db: ::std::marker::PhantomData, 72 | } 73 | } 74 | } 75 | } 76 | 77 | pub fn impl_ModelBuilder__insert(db: &dyn OrmliteCodegen, attr: &TableMeta) -> TokenStream { 78 | let box_future = crate::util::box_fut_ts(); 79 | let placeholder = db.placeholder_ts(); 80 | let db = db.database_ts(); 81 | let query = format!("INSERT INTO \"{}\" ({{}}) VALUES ({{}}) RETURNING *", attr.name); 82 | 83 | let bind_parameters = attr.database_columns().map(generate_conditional_bind); 84 | 85 | quote! { 86 | fn insert<'e: 'a, E>(self, db: E) -> #box_future<'a, ::ormlite::Result> 87 | where 88 | E: 'e +::ormlite::Executor<'e, Database = #db>, 89 | { 90 | Box::pin(async move { 91 | let mut placeholder = #placeholder; 92 | let set_fields = self.modified_fields(); 93 | let query = format!( 94 | #query, 95 | set_fields.join(", "), 96 | set_fields.iter().map(|_| placeholder.next().unwrap()).collect::>().join(", "), 97 | ); 98 | let mut q = ::ormlite::query_as::<#db, Self::Model>(&query); 99 | #(#bind_parameters)* 100 | let model = q.fetch_one(db).await?; 101 | Ok(model) 102 | }) 103 | } 104 | } 105 | } 106 | 107 | pub fn impl_InsertModel(db: &dyn OrmliteCodegen, meta: &ModelMeta) -> TokenStream { 108 | let Some(insert_struct) = &meta.insert_struct else { 109 | return TokenStream::new(); 110 | }; 111 | impl_Insert(db, meta, insert_struct, &meta.ident) 112 | } 113 | 114 | pub fn impl_Insert(db: &dyn OrmliteCodegen, meta: &TableMeta, model: &Ident, returns: &Ident) -> TokenStream { 115 | let box_future = crate::util::box_fut_ts(); 116 | let mut placeholder = db.placeholder(); 117 | let db = db.database_ts(); 118 | let fields = meta 119 | .database_columns() 120 | .filter(|&c| !c.has_database_default) 121 | .map(|c| c.name.clone()) 122 | .collect::>() 123 | .join(","); 124 | let placeholders = meta 125 | .database_columns() 126 | .filter(|&c| !c.has_database_default) 127 | .map(|_| placeholder.next().unwrap()) 128 | .collect::>() 129 | .join(","); 130 | let query = format!( 131 | "INSERT INTO \"{}\" ({}) VALUES ({}) RETURNING *", 132 | meta.name, fields, placeholders, 133 | ); 134 | let query_bindings = meta.database_columns().filter(|&c| !c.has_database_default).map(|c| { 135 | if let Some(rust_default) = &c.rust_default { 136 | let default: syn::Expr = syn::parse_str(&rust_default).expect("Failed to parse default_value"); 137 | quote! { 138 | q = q.bind(#default); 139 | } 140 | } else { 141 | insertion_binding(c) 142 | } 143 | }); 144 | 145 | let insert_join = meta.many_to_one_joins().map(|c| insert_join(c)); 146 | 147 | let late_bind = meta.many_to_one_joins().map(|c| { 148 | let id = &c.ident; 149 | quote! { 150 | model.#id = #id; 151 | } 152 | }); 153 | 154 | quote! 
{ 155 | impl ::ormlite::model::Insert<#db> for #model { 156 | type Model = #returns; 157 | 158 | #[allow(unused_mut)] 159 | fn insert<'a, A>(self, db: A) -> #box_future<'a, ::ormlite::Result> 160 | where 161 | A: 'a + Send + ::ormlite::Acquire<'a, Database = #db>, 162 | { 163 | Box::pin(async move { 164 | let mut conn = db.acquire().await?; 165 | let mut q =::ormlite::query_as::<#db, Self::Model>(#query); 166 | let mut model = self; 167 | #(#insert_join)* 168 | #(#query_bindings)* 169 | let mut model: #returns = q.fetch_one(&mut *conn).await?; 170 | #(#late_bind)* 171 | ::ormlite::Result::::Ok(model) 172 | }) 173 | } 174 | } 175 | } 176 | } 177 | 178 | /// Insert joined structs 179 | /// Assumed bindings: 180 | /// - `model`: model struct 181 | /// - `#id`: Other code relies on this binding being created 182 | pub fn insert_join(c: &ColumnMeta) -> TokenStream { 183 | let id = &c.ident; 184 | let joined_ty = c.ty.joined_type().unwrap(); 185 | 186 | let preexisting = match joined_ty { 187 | Type::Option(joined_ty) => { 188 | quote! { 189 | if let Some(id) = model.#id._id() { 190 | #joined_ty::fetch_one(id, &mut *conn).await? 191 | } else { 192 | None 193 | } 194 | } 195 | } 196 | joined_ty => { 197 | quote! { 198 | #joined_ty::fetch_one(model.#id._id(), &mut *conn).await? 199 | } 200 | } 201 | }; 202 | 203 | quote! { 204 | let #id = if let Some(modification) = model.#id._take_modification() { 205 | match modification 206 | .insert(&mut *conn) 207 | .on_conflict(::ormlite::query_builder::OnConflict::Ignore) 208 | .await { 209 | Ok(model) => Join::_query_result(model), 210 | Err(::ormlite::Error::SqlxError(::ormlite::SqlxError::RowNotFound)) => { 211 | let preexisting = #preexisting; 212 | Join::_query_result(preexisting) 213 | }, 214 | Err(e) => return Err(e), 215 | } 216 | } else { 217 | model.#id 218 | }; 219 | } 220 | } 221 | 222 | pub fn impl_Model__insert_many(db: &dyn OrmliteCodegen, meta: &ModelMeta, _mc: &MetadataCache) -> TokenStream { 223 | let box_future = crate::util::box_fut_ts(); 224 | let placeholder = db.placeholder_ts(); 225 | let dialect = db.dialect_ts(); 226 | let db = db.database_ts(); 227 | 228 | let query_bindings = meta.database_columns().map(|c| { 229 | if let Some(rust_default) = &c.rust_default { 230 | let default: syn::Expr = syn::parse_str(&rust_default).expect("Failed to parse default_value"); 231 | quote! { 232 | q = q.bind(#default); 233 | } 234 | } else if c.is_join() { 235 | let name = &c.ident; 236 | quote! { 237 | q = q.bind(model.#name._id()); 238 | } 239 | } else { 240 | insertion_binding(c) 241 | } 242 | }).collect_vec(); 243 | 244 | quote! 
{ 245 | fn insert_many<'e, E>(values: Vec, db: E) -> #box_future<'e, ::ormlite::Result>> 246 | where 247 | E: 'e + ::ormlite::Executor<'e, Database = #db>, 248 | { 249 | Box::pin(async move { 250 | let table = ::table_name(); 251 | let columns = ::table_columns(); 252 | let mut sql_values = ::ormlite::__private::Values::Values(Vec::new()); 253 | for _ in 0..values.len() { 254 | let mut value = ::ormlite::__private::Value::new(); 255 | value = value.placeholders(columns.len(), #dialect); 256 | sql_values = sql_values.value(value); 257 | } 258 | let sql = ::ormlite::__private::Insert::new(table) 259 | .columns(columns) 260 | .values(sql_values) 261 | .returning(columns); 262 | let sql = ::ormlite::__private::ToSql::to_sql(&sql, #dialect); 263 | let mut q = ::ormlite::query_as::<#db, Self>(&sql); 264 | for model in values { 265 | #(#query_bindings)* 266 | } 267 | q.fetch_all(db).await.map_err(Into::into) 268 | }) 269 | } 270 | } 271 | } -------------------------------------------------------------------------------- /macro/src/codegen/insert_model.rs: -------------------------------------------------------------------------------- 1 | use itertools::Itertools; 2 | use ormlite_attr::Ident; 3 | use ormlite_attr::ModelMeta; 4 | use proc_macro2::TokenStream; 5 | use quote::quote; 6 | use syn::DeriveInput; 7 | 8 | pub fn struct_InsertModel(ast: &DeriveInput, attr: &ModelMeta) -> TokenStream { 9 | let Some(insert_model) = &attr.insert_struct else { 10 | return quote! {}; 11 | }; 12 | let vis = &ast.vis; 13 | let struct_fields = attr.columns.iter().filter(|c| !c.is_default()).map(|c| { 14 | let id = &c.ident; 15 | let ty = &c.ty; 16 | quote! { 17 | pub #id: #ty 18 | } 19 | }); 20 | if let Some(extra_derives) = &attr.extra_derives { 21 | quote! { 22 | #[derive(Debug, #(#extra_derives,)*)] 23 | #vis struct #insert_model { 24 | #(#struct_fields,)* 25 | } 26 | } 27 | } else { 28 | quote! { 29 | #[derive(Debug)] 30 | #vis struct #insert_model { 31 | #(#struct_fields,)* 32 | } 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /macro/src/codegen/into_arguments.rs: -------------------------------------------------------------------------------- 1 | use crate::codegen::common::OrmliteCodegen; 2 | use ormlite_attr::TableMeta; 3 | use proc_macro2::TokenStream; 4 | use quote::quote; 5 | 6 | /// Allows the model to be turned into arguments. This can be used for bulk insertion. 7 | pub fn impl_IntoArguments(db: &dyn OrmliteCodegen, attr: &TableMeta) -> TokenStream { 8 | let mut placeholder = db.placeholder(); 9 | let db = db.database_ts(); 10 | let model = &attr.ident; 11 | let params = attr.database_columns().map(|c| { 12 | let field = &c.ident; 13 | let value = if c.is_json() { 14 | quote! { 15 | ::ormlite::types::Json(self.#field) 16 | } 17 | } else { 18 | quote! { 19 | self.#field 20 | } 21 | }; 22 | quote! { 23 | ::ormlite::Arguments::add(&mut args, #value).unwrap(); 24 | } 25 | }); 26 | 27 | quote! 
{ 28 | impl<'a> ::ormlite::IntoArguments<'a, #db> for #model { 29 | fn into_arguments(self) -> <#db as ::ormlite::Database>::Arguments<'a> { 30 | let mut args = <#db as ::ormlite::Database>::Arguments::<'a>::default(); 31 | #( 32 | #params 33 | )* 34 | args 35 | } 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /macro/src/codegen/join_description.rs: -------------------------------------------------------------------------------- 1 | use crate::MetadataCache; 2 | use ormlite_attr::TableMeta; 3 | use proc_macro2::TokenStream; 4 | use quote::quote; 5 | 6 | pub fn static_join_descriptions(attr: &TableMeta, metadata_cache: &MetadataCache) -> TokenStream { 7 | let joins = attr.columns.iter().filter(|c| c.is_join()).map(|c| { 8 | let join = c.join.as_ref().expect("not a join"); 9 | let field = &c.ident.to_string(); 10 | let column = &c.name; 11 | let struct_name = c.joined_struct_name().unwrap(); 12 | let joined_table = metadata_cache 13 | .get(&struct_name) 14 | .expect(&format!("Did not find metadata for joined struct: {}", struct_name)); 15 | let foreign_table = &joined_table.table.name; 16 | let foreign_key = &joined_table.pkey.name; 17 | 18 | let columns = joined_table.database_columns().map(|c| &c.name); 19 | let body = match join { 20 | ormlite_attr::Join::ManyToOne { column } => { 21 | quote! { 22 | ::ormlite::__private::JoinDescription::ManyToOne { 23 | columns: &[ 24 | #( 25 | #columns, 26 | )* 27 | ], 28 | foreign_table: #foreign_table, 29 | local_column: #column, 30 | field: #field, 31 | foreign_key: #foreign_key, 32 | } 33 | } 34 | } 35 | ormlite_attr::Join::ManyToMany { table } => todo!(), 36 | ormlite_attr::Join::OneToMany { model, field } => todo!(), 37 | }; 38 | let ident = &c.ident; 39 | quote! { 40 | pub fn #ident() -> ::ormlite::__private::JoinDescription { 41 | #body 42 | } 43 | } 44 | }); 45 | 46 | let model = &attr.ident; 47 | quote! { 48 | impl #model { 49 | #( 50 | #joins 51 | )* 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /macro/src/codegen/meta.rs: -------------------------------------------------------------------------------- 1 | use ormlite_attr::ModelMeta; 2 | use ormlite_attr::TableMeta; 3 | use proc_macro2::TokenStream; 4 | use quote::quote; 5 | 6 | pub fn impl_TableMeta(table: &TableMeta, pkey: Option<&str>) -> TokenStream { 7 | let ident = &table.ident; 8 | let table_name = &table.name; 9 | let id = match pkey { 10 | Some(id) => quote! { Some(#id) }, 11 | None => quote! { None }, 12 | }; 13 | 14 | let field_names = table.database_columns().map(|c| c.name.to_string()); 15 | 16 | quote! { 17 | impl ::ormlite::model::TableMeta for #ident { 18 | fn table_name() -> &'static str { 19 | #table_name 20 | } 21 | 22 | fn table_columns() -> &'static [&'static str] { 23 | &[#(#field_names,)*] 24 | } 25 | 26 | fn primary_key() -> Option<&'static str> { 27 | #id 28 | } 29 | } 30 | } 31 | } 32 | 33 | pub fn impl_JoinMeta(attr: &ModelMeta) -> TokenStream { 34 | let model = &attr.ident; 35 | let id_type = &attr.pkey.ty; 36 | let id = &attr.pkey.ident; 37 | 38 | quote! { 39 | impl ::ormlite::model::JoinMeta for #model { 40 | type IdType = #id_type; 41 | fn _id(&self) -> Self::IdType { 42 | // clone is identical to Copy for most id types, but lets us use cloneable types like String. 
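// (e.g. an i32 or Uuid id is Copy, so the clone is free; a String id is actually cloned)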
43 | self.#id.clone() 44 | } 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /macro/src/codegen/model.rs: -------------------------------------------------------------------------------- 1 | use crate::codegen::common::OrmliteCodegen; 2 | use crate::codegen::insert::{impl_Model__insert, impl_Model__insert_many}; 3 | use crate::codegen::select::impl_Model__select; 4 | use crate::codegen::update::impl_Model__update_all_fields; 5 | use crate::MetadataCache; 6 | use ormlite_attr::ModelMeta; 7 | use proc_macro2::TokenStream; 8 | use quote::quote; 9 | 10 | pub fn impl_Model(db: &dyn OrmliteCodegen, attr: &ModelMeta, metadata_cache: &MetadataCache) -> TokenStream { 11 | let model = &attr.ident; 12 | let partial_model = attr.builder_struct(); 13 | 14 | let impl_Model__insert = impl_Model__insert(db, &attr, metadata_cache); 15 | let impl_Model__insert_many = impl_Model__insert_many(db, &attr, metadata_cache); 16 | let impl_Model__update_all_fields = impl_Model__update_all_fields(db, attr); 17 | let impl_Model__delete = impl_Model__delete(db, attr); 18 | let impl_Model__fetch_one = impl_Model__fetch_one(db, attr); 19 | let impl_Model__select = impl_Model__select(db, &attr.table); 20 | let impl_Model__builder = impl_Model__builder(attr); 21 | let impl_Model__update_partial = impl_Model__update_partial(attr); 22 | let db = db.database_ts(); 23 | quote! { 24 | impl ::ormlite::model::Model<#db> for #model { 25 | type ModelBuilder<'a> = #partial_model<'a> where Self: 'a; 26 | 27 | #impl_Model__insert 28 | #impl_Model__insert_many 29 | #impl_Model__update_all_fields 30 | #impl_Model__delete 31 | #impl_Model__fetch_one 32 | #impl_Model__select 33 | 34 | fn query(query: &str) -> ::ormlite::query::QueryAs<#db, Self, <#db as ::ormlite::Database>::Arguments<'_>> { 35 | ::ormlite::query_as::<_, Self>(query) 36 | } 37 | 38 | #impl_Model__builder 39 | #impl_Model__update_partial 40 | 41 | } 42 | } 43 | } 44 | 45 | pub fn impl_Model__delete(db: &dyn OrmliteCodegen, attr: &ModelMeta) -> TokenStream { 46 | let mut placeholder = db.placeholder(); 47 | 48 | let query = format!( 49 | "DELETE FROM \"{}\" WHERE {} = {}", 50 | attr.name, 51 | attr.pkey.name, 52 | placeholder.next().unwrap() 53 | ); 54 | 55 | let box_future = crate::util::box_fut_ts(); 56 | let db = db.database_ts(); 57 | let id = &attr.pkey.ident; 58 | quote! { 59 | fn delete<'e, E>(self, db: E) -> #box_future<'e, ::ormlite::Result<()>> 60 | where 61 | E: 'e +::ormlite::Executor<'e, Database = #db> 62 | { 63 | Box::pin(async move { 64 | let row =::ormlite::query(#query) 65 | .bind(self.#id) 66 | .execute(db) 67 | .await 68 | .map_err(::ormlite::Error::from)?; 69 | if row.rows_affected() == 0 { 70 | Err(::ormlite::Error::from(::ormlite::SqlxError::RowNotFound)) 71 | } else { 72 | Ok(()) 73 | } 74 | }) 75 | } 76 | } 77 | } 78 | 79 | pub fn impl_Model__fetch_one(db: &dyn OrmliteCodegen, attr: &ModelMeta) -> TokenStream { 80 | let mut placeholder = db.placeholder(); 81 | 82 | let query = format!( 83 | "SELECT * FROM \"{}\" WHERE {} = {}", 84 | attr.name, 85 | attr.pkey.name, 86 | placeholder.next().unwrap() 87 | ); 88 | 89 | let db = db.database_ts(); 90 | let box_future = crate::util::box_fut_ts(); 91 | quote! 
{ 92 | fn fetch_one<'e, 'a, Arg, E>(id: Arg, db: E) -> #box_future<'e, ::ormlite::Result> 93 | where 94 | 'a: 'e, 95 | Arg: 'a + Send + ::ormlite::Encode<'a, #db> + ::ormlite::types::Type<#db>, 96 | E: 'e +::ormlite::Executor<'e, Database = #db> 97 | { 98 | Box::pin(async move { 99 | ::ormlite::query_as::<#db, Self>(#query) 100 | .bind(id) 101 | .fetch_one(db) 102 | .await 103 | .map_err(::ormlite::Error::from) 104 | }) 105 | } 106 | } 107 | } 108 | 109 | pub fn impl_Model__builder(attr: &ModelMeta) -> TokenStream { 110 | let partial_model = &attr.builder_struct(); 111 | quote! { 112 | fn builder() -> #partial_model<'static> { 113 | #partial_model::default() 114 | } 115 | } 116 | } 117 | 118 | pub fn impl_Model__update_partial(attr: &ModelMeta) -> TokenStream { 119 | let partial_model = &attr.builder_struct(); 120 | quote! { 121 | fn update_partial(&self) -> #partial_model<'_> { 122 | let mut partial = #partial_model::default(); 123 | partial.updating = Some(&self); 124 | partial 125 | } 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /macro/src/codegen/model_builder.rs: -------------------------------------------------------------------------------- 1 | use crate::codegen::common::OrmliteCodegen; 2 | use crate::codegen::insert::impl_ModelBuilder__insert; 3 | use crate::codegen::update::impl_ModelBuilder__update; 4 | use ormlite_attr::ModelMeta; 5 | use ormlite_attr::TableMeta; 6 | use proc_macro2::TokenStream; 7 | use quote::quote; 8 | use syn::DeriveInput; 9 | 10 | pub fn struct_ModelBuilder(ast: &DeriveInput, attr: &ModelMeta) -> TokenStream { 11 | let model = &attr.ident; 12 | let model_builder = attr.builder_struct(); 13 | let vis = &ast.vis; 14 | 15 | let settable = attr.database_columns().map(|c| { 16 | let name = &c.ident; 17 | let ty = &c.ty; 18 | quote! { pub #name: std::option::Option<#ty> } 19 | }); 20 | 21 | let methods = attr.database_columns().map(|c| { 22 | let name = &c.ident; 23 | let ty = &c.ty; 24 | if ty.is_string() { 25 | quote! { 26 | pub fn #name>(mut self, #name: T) -> Self { 27 | self.#name = Some(#name.into()); 28 | self 29 | } 30 | } 31 | } else { 32 | quote! { 33 | pub fn #name(mut self, #name: #ty) -> Self { 34 | self.#name = Some(#name); 35 | self 36 | } 37 | } 38 | } 39 | }); 40 | 41 | let fields_none = attr.database_columns().map(|c| { 42 | let name = &c.ident; 43 | quote! { 44 | #name: None 45 | } 46 | }); 47 | 48 | quote! { 49 | #vis struct #model_builder<'a> { 50 | #(#settable,)* 51 | updating: Option<&'a #model>, 52 | } 53 | 54 | impl<'a> std::default::Default for #model_builder<'a> { 55 | fn default() -> Self { 56 | Self { 57 | #(#fields_none,)* 58 | updating: None, 59 | } 60 | } 61 | } 62 | 63 | impl<'a> #model_builder<'a> { 64 | #(#methods)* 65 | 66 | } 67 | } 68 | } 69 | 70 | pub fn impl_ModelBuilder__build(attr: &TableMeta) -> TokenStream { 71 | let unpack = attr.database_columns().map(|c| { 72 | let c = &c.ident; 73 | let msg = format!("Tried to build a model, but the field `{}` was not set.", c); 74 | quote! { let #c = self.#c.expect(#msg); } 75 | }); 76 | 77 | let fields = attr.database_columns().map(|c| &c.ident); 78 | 79 | let skipped_fields = attr.columns.iter().filter(|&c| c.skip).map(|c| { 80 | let id = &c.ident; 81 | quote! { 82 | #id: Default::default() 83 | } 84 | }); 85 | 86 | quote! 
{ 87 | fn build(self) -> Self::Model { 88 | #( #unpack )* 89 | Self::Model { 90 | #( #fields, )* 91 | #( #skipped_fields, )* 92 | } 93 | } 94 | } 95 | } 96 | 97 | pub fn impl_ModelBuilder(db: &dyn OrmliteCodegen, attr: &ModelMeta) -> TokenStream { 98 | let partial_model = attr.builder_struct(); 99 | let model = &attr.ident; 100 | 101 | let impl_ModelBuilder__insert = impl_ModelBuilder__insert(db, &attr.table); 102 | let impl_ModelBuilder__update = impl_ModelBuilder__update(db, attr); 103 | let impl_ModelBuilder__build = impl_ModelBuilder__build(&attr.table); 104 | 105 | let build_modified_fields = attr.database_columns().map(|c| { 106 | let name = &c.ident; 107 | let name_str = &c.name; 108 | quote! { 109 | if self.#name.is_some() { 110 | ret.push(#name_str); 111 | } 112 | } 113 | }); 114 | 115 | let db = db.database_ts(); 116 | quote! { 117 | impl<'a> ::ormlite::model::ModelBuilder<'a, #db> for #partial_model<'a> { 118 | type Model = #model; 119 | #impl_ModelBuilder__insert 120 | #impl_ModelBuilder__update 121 | #impl_ModelBuilder__build 122 | 123 | fn modified_fields(&self) -> Vec<&'static str> { 124 | let mut ret = Vec::new(); 125 | #(#build_modified_fields)* 126 | ret 127 | } 128 | } 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /macro/src/codegen/mysql.rs: -------------------------------------------------------------------------------- 1 | use crate::codegen::common::OrmliteCodegen; 2 | 3 | use ormlite_core::query_builder::Placeholder; 4 | use proc_macro2::TokenStream; 5 | use quote::quote; 6 | 7 | pub struct MysqlBackend {} 8 | 9 | impl OrmliteCodegen for MysqlBackend { 10 | fn dialect_ts(&self) -> TokenStream { 11 | quote! { ::ormlite::__private::Dialect::Mysql } 12 | } 13 | fn database_ts(&self) -> TokenStream { 14 | quote! { ::ormlite::mysql::Mysql } 15 | } 16 | 17 | fn placeholder_ts(&self) -> TokenStream { 18 | quote! { 19 | ::ormlite::query_builder::Placeholder::question_mark() 20 | } 21 | } 22 | 23 | fn placeholder(&self) -> Placeholder { 24 | Placeholder::question_mark() 25 | } 26 | 27 | fn row(&self) -> TokenStream { 28 | quote! { ::ormlite::mysql::MysqlRow } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /macro/src/codegen/postgres.rs: -------------------------------------------------------------------------------- 1 | use crate::codegen::common::{from_row_bounds, OrmliteCodegen}; 2 | use crate::MetadataCache; 3 | use ormlite_core::query_builder::Placeholder; 4 | use proc_macro2::TokenStream; 5 | use quote::quote; 6 | 7 | pub struct PostgresBackend; 8 | 9 | impl OrmliteCodegen for PostgresBackend { 10 | fn dialect_ts(&self) -> TokenStream { 11 | quote! { ::ormlite::__private::Dialect::Postgres } 12 | } 13 | fn database_ts(&self) -> TokenStream { 14 | quote! { ::ormlite::postgres::Postgres } 15 | } 16 | 17 | fn placeholder_ts(&self) -> TokenStream { 18 | quote! { 19 | ::ormlite::query_builder::Placeholder::dollar_sign() 20 | } 21 | } 22 | 23 | fn placeholder(&self) -> Placeholder { 24 | Placeholder::dollar_sign() 25 | } 26 | 27 | fn row(&self) -> TokenStream { 28 | quote! 
{ 29 | ::ormlite::postgres::PgRow 30 | } 31 | } 32 | } 33 | 34 | #[cfg(test)] 35 | mod test { 36 | use super::*; 37 | use ormlite_attr::ttype::InnerType; 38 | use ormlite_attr::ColumnMeta; 39 | use ormlite_attr::ModelMeta; 40 | 41 | #[test] 42 | fn test_all_bounds() { 43 | let db = PostgresBackend; 44 | let mut cache = MetadataCache::new(); 45 | let table = ModelMeta::mock( 46 | "user", 47 | vec![ 48 | ColumnMeta::mock("id", "u32"), 49 | ColumnMeta::mock("name", "String"), 50 | ColumnMeta::mock("organization_id", "u32"), 51 | ColumnMeta::mock_join("organization", "Organization"), 52 | ], 53 | ); 54 | cache.insert("User".to_string(), table.clone()); 55 | let table = ModelMeta::mock( 56 | "organization", 57 | vec![ 58 | ColumnMeta::mock("id", "u32"), 59 | ColumnMeta::mock("name", "String"), 60 | ColumnMeta::mock("is_active", "bool"), 61 | ], 62 | ); 63 | cache.insert("Organization".to_string(), table.clone()); 64 | 65 | let types_for_bound = crate::codegen::common::table_primitive_types(&table.table, &cache); 66 | let types_for_bound = types_for_bound.into_iter().map(|c| c.into_owned()).collect::>(); 67 | assert_eq!( 68 | types_for_bound, 69 | vec![ 70 | InnerType::mock("u32"), 71 | InnerType::mock("String"), 72 | InnerType::mock("bool"), 73 | ] 74 | ); 75 | let bounds = from_row_bounds(&db, &table.table, &cache); 76 | let bounds = quote! { 77 | #(#bounds)* 78 | }; 79 | assert_eq!( 80 | bounds.to_string(), 81 | "u32 : :: ormlite :: decode :: Decode < 'a , R :: Database > , ".to_owned() 82 | + "u32 : :: ormlite :: types :: Type < R :: Database > , " 83 | + "String : :: ormlite :: decode :: Decode < 'a , R :: Database > , " 84 | + "String : :: ormlite :: types :: Type < R :: Database > , " 85 | + "bool : :: ormlite :: decode :: Decode < 'a , R :: Database > , " 86 | + "bool : :: ormlite :: types :: Type < R :: Database > ," 87 | ); 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /macro/src/codegen/select.rs: -------------------------------------------------------------------------------- 1 | use crate::codegen::common::OrmliteCodegen; 2 | use ormlite_attr::TableMeta; 3 | use proc_macro2::TokenStream; 4 | use quote::quote; 5 | 6 | pub fn impl_Model__select(db: &dyn OrmliteCodegen, attr: &TableMeta) -> TokenStream { 7 | let table_name = &attr.name; 8 | let db = db.database_ts(); 9 | quote! { 10 | fn select<'args>() -> ::ormlite::query_builder::SelectQueryBuilder<'args, #db, Self> { 11 | ::ormlite::query_builder::SelectQueryBuilder::default() 12 | .select(format!("\"{}\".*", #table_name)) 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /macro/src/codegen/sqlite.rs: -------------------------------------------------------------------------------- 1 | use crate::codegen::common::OrmliteCodegen; 2 | 3 | use ormlite_core::query_builder::Placeholder; 4 | use proc_macro2::TokenStream; 5 | use quote::quote; 6 | 7 | pub struct SqliteBackend {} 8 | 9 | impl OrmliteCodegen for SqliteBackend { 10 | fn dialect_ts(&self) -> TokenStream { 11 | quote! { ::ormlite::__private::Dialect::Sqlite } 12 | } 13 | fn database_ts(&self) -> TokenStream { 14 | quote! { ::ormlite::sqlite::Sqlite } 15 | } 16 | 17 | fn placeholder_ts(&self) -> TokenStream { 18 | quote! { 19 | ::ormlite::query_builder::Placeholder::question_mark() 20 | } 21 | } 22 | 23 | fn placeholder(&self) -> Placeholder { 24 | Placeholder::question_mark() 25 | } 26 | 27 | fn row(&self) -> TokenStream { 28 | quote! 
{ 29 | ::ormlite::sqlite::SqliteRow 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /macro/src/codegen/update.rs: -------------------------------------------------------------------------------- 1 | use crate::codegen::common::{generate_conditional_bind, insertion_binding, OrmliteCodegen}; 2 | use ormlite_attr::ModelMeta; 3 | use proc_macro2::TokenStream; 4 | use quote::quote; 5 | 6 | pub fn impl_Model__update_all_fields(db: &dyn OrmliteCodegen, attr: &ModelMeta) -> TokenStream { 7 | let box_future = crate::util::box_fut_ts(); 8 | let mut placeholder = db.placeholder(); 9 | let db = db.database_ts(); 10 | let mut query = "UPDATE \"".to_string(); 11 | query.push_str(&attr.name); 12 | query.push_str("\" SET "); 13 | for c in attr.database_columns_except_pkey() { 14 | query.push_str(&c.name); 15 | query.push_str(" = "); 16 | query.push_str(&placeholder.next().unwrap()); 17 | query.push_str(", "); 18 | } 19 | // remove the final ", " 20 | query.truncate(query.len() - 2); 21 | query.push_str(" WHERE "); 22 | query.push_str(&attr.pkey.name); 23 | query.push_str(" = "); 24 | query.push_str(&placeholder.next().unwrap()); 25 | query.push_str(" RETURNING *"); 26 | 27 | let id = &attr.pkey.ident; 28 | let query_bindings = attr.database_columns_except_pkey().map(|c| insertion_binding(c)); 29 | 30 | let unwind_joins = attr.many_to_one_joins().map(|c| { 31 | let id = &c.ident; 32 | quote! { 33 | let #id = &model.#id; 34 | } 35 | }); 36 | 37 | quote! { 38 | fn update_all_fields<'e, E>(self, db: E) -> #box_future<'e, ::ormlite::Result> 39 | where 40 | E: 'e +::ormlite::Executor<'e, Database = #db>, 41 | { 42 | Box::pin(async move { 43 | let mut q =::ormlite::query_as::<_, Self>(#query); 44 | let model = self; 45 | #(#unwind_joins)* 46 | #(#query_bindings)* 47 | q.bind(model.#id) 48 | .fetch_one(db) 49 | .await 50 | .map_err(::ormlite::Error::from) 51 | }) 52 | } 53 | } 54 | } 55 | 56 | pub fn impl_ModelBuilder__update(db: &dyn OrmliteCodegen, attr: &ModelMeta) -> TokenStream { 57 | let box_future = crate::util::box_fut_ts(); 58 | let placeholder = db.placeholder_ts(); 59 | let db = db.database_ts(); 60 | 61 | let query = format!( 62 | "UPDATE \"{}\" SET {{}} WHERE {} = {{}} RETURNING *", 63 | attr.name, attr.pkey.name, 64 | ); 65 | 66 | let bind_update = attr.database_columns().map(generate_conditional_bind); 67 | let id = &attr.pkey.ident; 68 | quote! { 69 | fn update<'e: 'a, E>(self, db: E) -> #box_future<'a, ::ormlite::Result> 70 | where 71 | E: 'e +::ormlite::Executor<'e, Database = #db>, 72 | { 73 | Box::pin(async move { 74 | let mut placeholder = #placeholder; 75 | let set_fields = self.modified_fields(); 76 | let update_id = self.updating 77 | .expect("Tried to call ModelBuilder::update(), but the ModelBuilder \ 78 | has no reference to what model to update. You might have called \ 79 | something like: `::build().update(&mut db)`. A partial update \ 80 | looks something like \ 81 | `.update_partial().update(&mut db)`.") 82 | .#id 83 | // NOTE: This clone is free for Copy types. 
.clone() fixes ormlite#13 84 | .clone(); 85 | let query = format!( 86 | #query, 87 | set_fields.into_iter().map(|f| format!("\"{}\" = {}", f, placeholder.next().unwrap())).collect::>().join(", "), 88 | placeholder.next().unwrap() 89 | ); 90 | let mut q =::ormlite::query_as::<#db, Self::Model>(&query); 91 | #(#bind_update)* 92 | q = q.bind(update_id); 93 | q.fetch_one(db) 94 | .await 95 | .map_err(::ormlite::Error::from) 96 | }) 97 | } 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /macro/src/util.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::TokenStream; 2 | use quote::quote; 3 | use syn::punctuated::Punctuated; 4 | use syn::Data::Struct; 5 | use syn::{DataStruct, DeriveInput, Field, Fields, FieldsNamed}; 6 | 7 | pub fn box_fut_ts() -> TokenStream { 8 | quote!(::ormlite::BoxFuture) 9 | } 10 | -------------------------------------------------------------------------------- /ormlite/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "ormlite" 3 | version = "0.23.3" 4 | edition = "2021" 5 | authors = ["Kurt Wolf "] 6 | description = "An ORM for people who love SQL" 7 | license = "MIT" 8 | repository = "https://github.com/kurtbuilds/ormlite" 9 | homepage = "https://github.com/kurtbuilds/ormlite" 10 | documentation = "https://docs.rs/ormlite" 11 | autotests = false 12 | readme = "../README.md" 13 | keywords = ["sqlite", "postgres", "orm", "async", "mysql"] 14 | categories = ["database"] 15 | # autoexamples = false 16 | 17 | [package.metadata.docs.rs] 18 | rustdoc-args = ["--cfg", "docsrs"] 19 | all-features = true 20 | 21 | [[test]] 22 | name = "sqlite" 23 | path = "tests/sqlite.rs" 24 | required-features = ["sqlite"] 25 | 26 | [[test]] 27 | name = "simple" 28 | path = "tests/simple.rs" 29 | required-features = ["sqlite"] 30 | 31 | [[test]] 32 | name = "postgres" 33 | path = "tests/postgres.rs" 34 | required-features = ["postgres"] 35 | 36 | [features] 37 | mysql = ["sqlx/mysql", "ormlite-macro/mysql"] 38 | sqlite = ["sqlx/sqlite", "ormlite-macro/sqlite", "ormlite-core/sqlite"] 39 | postgres = ["sqlx/postgres", "ormlite-macro/postgres", "ormlite-core/postgres"] 40 | runtime-tokio-rustls = ["ormlite-macro/runtime-tokio-rustls", "sqlx/runtime-tokio-rustls", "ormlite-core/runtime-tokio-rustls"] 41 | uuid = ["sqlx/uuid"] 42 | decimal = ["sqlx/rust_decimal"] 43 | chrono = ["sqlx/chrono"] 44 | time = ["sqlx/time"] 45 | json = ["sqlx/json"] 46 | default = ["runtime-tokio-rustls"] 47 | 48 | # When you have multiple databases configured, you can use the default- feature to 49 | # set which database the macros will use by default. 50 | # Use the `#[orm(database = "postgres")]` attribute to override the default. 51 | # Use the attribute multiple times to generate code for multiple databases. 
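# e.g. features = ["sqlite", "default-postgres"] enables both backends and, via the default-* features below, makes Postgres the macro default (illustrative combination).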
52 | default-postgres = ["postgres", "ormlite-macro/default-postgres"] 53 | default-sqlite = ["sqlite", "ormlite-macro/default-sqlite"] 54 | default-mysql = ["mysql", "ormlite-macro/default-mysql"] 55 | 56 | [dependencies] 57 | sqlx = { version = "0.8.2" } 58 | tokio = { version = "1.40.0", features = ["full"] } 59 | ormlite-macro.workspace = true 60 | ormlite-core.workspace = true 61 | sqlx-core.workspace = true 62 | sqlmo.workspace = true 63 | tokio-stream = "0.1.16" 64 | 65 | [dev-dependencies] 66 | trybuild = { version = "1.0.99", features = ["diff"] } 67 | env_logger = "0.11.5" 68 | uuid = { version = "1.10.0", features = ["serde", "v4"] } 69 | serde = { version = "1.0.210", features = ["derive"] } 70 | serde_json = { version = "1.0.128" } 71 | chrono = { version = "0.4.38", features = ["serde"] } 72 | -------------------------------------------------------------------------------- /ormlite/Justfile: -------------------------------------------------------------------------------- 1 | set positional-arguments 2 | set dotenv-load 3 | set export 4 | 5 | help: 6 | @just --list --unsorted 7 | 8 | clean: 9 | cargo clean 10 | 11 | build: 12 | cargo build 13 | alias b := build 14 | 15 | release: 16 | cargo build --release 17 | 18 | install: 19 | cargo install --path . 20 | 21 | bootstrap: 22 | cargo install cargo-edit 23 | 24 | test *ARGS: 25 | # Because trybuild doesn't support passing environment directly, tests are dependent on state, namely the state of the MODEL_FOLDERS 26 | # var in the test runner's process environment. 27 | # To solve that dependence, we need to set --test-threads=1. 28 | # If trybuild was modified to allow passing env for a test run, then 29 | cargo test --features sqlite,uuid --tests -- --test-threads=1 30 | cargo test --features postgres,uuid,chrono --tests -- --test-threads=1 31 | 32 | check: 33 | cargo check 34 | alias c := check 35 | 36 | fix: 37 | cargo clippy --fix 38 | 39 | bench: 40 | cargo criterion --features bench 41 | 42 | # Bump version. level=major,minor,patch 43 | version level: 44 | git diff-index --exit-code HEAD > /dev/null || ! echo You have untracked changes. Commit your changes before bumping the version. 45 | cargo set-version --bump {{level}} 46 | cargo update # This bumps Cargo.lock 47 | VERSION=$(rg "version = \"([0-9.]+)\"" -or '$1' Cargo.toml | head -n1) && \ 48 | git commit -am "Bump version {{level}} to $VERSION" && \ 49 | git tag v$VERSION && \ 50 | git push origin v$VERSION 51 | git push 52 | 53 | publish: 54 | cargo publish 55 | 56 | patch: test 57 | just version patch 58 | just publish 59 | 60 | run *args: 61 | cargo run --features sqlite,runtime-tokio-rustls,uuid "$@" 62 | alias r := run 63 | 64 | # Development workflow for macros 65 | # 1. Write an example for what you want to run 66 | # 2. Run `just run` to run it. Encounter compile errors. 67 | # 3. Run `just expand-run` to expand, then compile it. This compilation step shows actual line numbers on expanded output. 68 | # 4. Fix the errors, and run `just rerun` to confirm the manual edits to the expanded code have fixed the issue. 69 | # 5. Edit the macro code, so that it achieves the same output as your manual edits to the expanded code. 70 | # 6. Dance a jig because you're now a macro wizard and developing macros with 1/100th the guesswork of before. 
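# For example, run `just expand-run` (alias `just er`) to expand tests/sqlite/06-insert.rs into examples/expand.rs, then `just rerun` (alias `just rr`) to compile and run the expanded code.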
71 | expand-run: 72 | mkdir -p examples 73 | touch examples/expand.rs 74 | cp tests/sqlite/06-insert.rs examples/expandable.rs 75 | sd -s '../setup.rs' '../tests/setup.rs' examples/expandable.rs -f 76 | printf '#![allow(unused)]\n' > examples/expand.rs 77 | printf '#![allow(internal_features)]\n' > examples/expand.rs 78 | printf '#![feature(fmt_internals)]\n' >> examples/expand.rs 79 | printf '#![feature(fmt_helpers_for_derive)]\n' >> examples/expand.rs 80 | printf '#![feature(print_internals)]\n' >> examples/expand.rs 81 | printf '#![feature(hint_must_use)]\n' >> examples/expand.rs 82 | printf '#![feature(panic_internals)]\n' >> examples/expand.rs 83 | cargo expand --example expandable --features sqlite,uuid >> examples/expand.rs 84 | sd '::alloc::' '::std::' examples/expand.rs -f 85 | sd '::std::__export::must_use' '::std::hint::must_use' examples/expand.rs -f 86 | sd '# ?\[ormlite.*' '' examples/expand.rs -f 87 | sd -s '#[rustc_box]' '' examples/expand.rs -f 88 | @just rerun 89 | alias er := expand-run 90 | 91 | rerun: 92 | cargo +nightly run --example expand --features sqlite,uuid 93 | alias rr := rerun 94 | 95 | backtrace: 96 | RUSTFLAGS="-Z macro-backtrace" cargo run --example many-to-one --features sqlite,runtime-tokio-rustls,uuid 97 | 98 | postgres *ARGS: 99 | cargo test -F postgres,uuid,chrono --tests $ARGS -- --test-threads=1 100 | -------------------------------------------------------------------------------- /ormlite/README.md: -------------------------------------------------------------------------------- 1 | # Development 2 | 3 | Run the derive model code: 4 | 5 | cargo run --bin simple --features runtime-tokio-rustls,sqlite 6 | 7 | Run test code: 8 | 9 | cargo test --features runtime-tokio-rustls,sqlite 10 | 11 | Run tests themselves 12 | 13 | 14 | ### Workflow 15 | 16 | Try to build and compile using the derive macros. 17 | 18 | just run --bin plain 19 | 20 | Copy and paste into expanded.rs if you need to see details about the expanded code. 21 | 22 | cargo expand --bin plain | pbcopy 23 | -------------------------------------------------------------------------------- /ormlite/examples/expandable.rs: -------------------------------------------------------------------------------- 1 | use ormlite::model::{Insert, Join, JoinMeta, Model}; 2 | use serde::{Deserialize, Serialize}; 3 | use serde_json::json; 4 | use sqlmo::ToSql; 5 | 6 | use ormlite::Connection; 7 | #[path = "../tests/setup.rs"] 8 | mod setup; 9 | 10 | #[derive(Debug, Model, Clone, Serialize, Deserialize)] 11 | pub struct Organization { 12 | id: i32, 13 | name: String, 14 | } 15 | 16 | #[derive(Model)] 17 | #[ormlite(insert = "InsertUser", extra_derives(Serialize, Deserialize))] 18 | // Note the previous syntax, #[ormlite(insertable = InsertUser)] still works, but the new syntax is preferred. 
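// `insert = "InsertUser"` makes the derive generate an InsertUser struct that omits columns with defaults, and extra_derives(...) adds the listed derives to it (see struct_InsertModel in the macro crate).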
19 | pub struct User { 20 | id: i32, 21 | name: String, 22 | #[ormlite(default)] 23 | secret: Option<String>, 24 | #[ormlite(default_value = "5")] 25 | number: i32, 26 | #[ormlite(column = "type")] 27 | ty: i32, 28 | #[ormlite(column = "org_id")] 29 | organization: Join<Organization>, 30 | } 31 | 32 | #[derive(Insert)] 33 | #[ormlite(returns = "User")] 34 | pub struct InsertUser2 { 35 | name: String, 36 | number: i32, 37 | #[ormlite(column = "type")] 38 | ty: i32, 39 | org_id: i32, 40 | } 41 | 42 | #[tokio::main] 43 | async fn main() { 44 | env_logger::init(); 45 | let mut db = ormlite::sqlite::SqliteConnection::connect(":memory:").await.unwrap(); 46 | let migration = setup::migrate_self(&[file!()]); 47 | for s in migration.statements { 48 | let sql = s.to_sql(sqlmo::Dialect::Sqlite); 49 | ormlite::query(&sql).execute(&mut db).await.unwrap(); 50 | } 51 | 52 | let org = Organization { 53 | id: 12321, 54 | name: "my org".to_string(), 55 | }; 56 | 57 | let champ = InsertUser { 58 | name: "Champ".to_string(), 59 | organization: Join::new(org.clone()), 60 | ty: 12, 61 | } 62 | .insert(&mut db) 63 | .await 64 | .unwrap(); 65 | 66 | assert_eq!(champ.id, 1); 67 | assert_eq!(champ.secret, None); 68 | assert_eq!(champ.number, 5); 69 | assert_eq!(champ.organization.id, 12321); 70 | assert_eq!(champ.organization.name, "my org"); 71 | 72 | let champ_copy = InsertUser { 73 | name: "Champ".to_string(), 74 | organization: Join::new(org.clone()), 75 | ty: 12, 76 | }; 77 | let champ_json = json!(champ_copy).to_string(); 78 | 79 | assert_eq!( 80 | champ_json, 81 | r#"{"name":"Champ","organization":{"id":12321,"name":"my org"},"ty":12}"# 82 | ); 83 | 84 | let champ_deserializing = 85 | serde_json::from_str::<InsertUser>(r#"{"name":"Champ","organization":{"id":12321,"name":"my org"},"ty":12}"#); 86 | 87 | let Ok(champ_deserialized) = champ_deserializing else { 88 | panic!("Deserialize failing"); 89 | }; 90 | 91 | assert_eq!(champ_deserialized.name, champ_copy.name); 92 | assert_eq!(champ_deserialized.organization.name, champ_copy.organization.name); 93 | 94 | let millie = InsertUser { 95 | name: "Millie".to_string(), 96 | organization: Join::new(org), 97 | ty: 3, 98 | } 99 | .insert(&mut db) 100 | .await 101 | .unwrap(); 102 | assert_eq!(millie.id, 2); 103 | assert_eq!(millie.secret, None); 104 | assert_eq!(millie.number, 5); 105 | assert_eq!(millie.organization.id, 12321); 106 | assert_eq!(millie.organization.name, "my org"); 107 | 108 | let enoki = InsertUser { 109 | name: "Enoki".to_string(), 110 | organization: Join::new_with_id(12321), 111 | ty: 6, 112 | } 113 | .insert(&mut db) 114 | .await 115 | .unwrap(); 116 | assert_eq!(enoki.id, 3); 117 | assert_eq!(enoki.secret, None); 118 | assert_eq!(enoki.number, 5); 119 | assert_eq!(enoki.organization.id, 12321); 120 | assert_eq!(enoki.organization.loaded(), false); 121 | 122 | let user = InsertUser2 { 123 | name: "Kloud".to_string(), 124 | number: 12, 125 | ty: 8, 126 | org_id: 12321, 127 | } 128 | .insert(&mut db) 129 | .await 130 | .unwrap(); 131 | assert_eq!(user.id, 4); 132 | assert_eq!(user.name, "Kloud"); 133 | let user = User::fetch_one(4, &mut db).await.unwrap(); 134 | assert_eq!(user.id, 4); 135 | assert_eq!(user.name, "Kloud"); 136 | assert_eq!(user.ty, 8); 137 | assert_eq!(user.organization.id, 12321); 138 | 139 | let orgs = vec![ 140 | Organization { 141 | id: 1, 142 | name: "bulk-org1".to_string(), 143 | }, 144 | Organization { 145 | id: 2, 146 | name: "bulk-org2".to_string(), 147 | }, 148 | Organization { 149 | id: 3, 150 | name: "bulk-org3".to_string(), 151 | }, 152 | ]; 153 | 
let orgs = Organization::insert_many(orgs, &mut db).await.unwrap(); 154 | assert_eq!(orgs.len(), 3); 155 | } 156 | -------------------------------------------------------------------------------- /ormlite/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(docsrs, feature(doc_cfg))] 2 | pub use model::{FromRow, Insert, IntoArguments, Model, TableMeta}; 3 | pub use ormlite_core::BoxFuture; 4 | pub use ormlite_core::{Error, Result}; 5 | pub use ormlite_macro::Enum; 6 | pub use sqlx::{Column, ColumnIndex, Database, Decode, Row}; 7 | pub use tokio_stream::StreamExt; 8 | 9 | pub use sqlx::pool::PoolOptions; 10 | pub use sqlx::{ 11 | query, query_as, query_as_with, query_with, Acquire, Arguments, ConnectOptions, Connection, Encode, Executor, Pool, 12 | }; 13 | 14 | pub mod model; 15 | 16 | pub mod query_builder { 17 | pub use ormlite_core::insert::OnConflict; 18 | pub use ormlite_core::query_builder::{Placeholder, QueryBuilderArgs, SelectQueryBuilder}; 19 | } 20 | 21 | pub mod types { 22 | pub use ormlite_macro::ManualType; 23 | pub use sqlx::types::*; 24 | } 25 | 26 | pub mod decode { 27 | pub use sqlx::decode::*; 28 | } 29 | 30 | pub use sqlx::Error as SqlxError; 31 | 32 | pub mod database { 33 | pub use sqlx::database::*; 34 | } 35 | 36 | /// We need objects available for proc-macros that aren't meant to be available to end users. This module does that. 37 | #[doc(hidden)] 38 | pub mod __private { 39 | pub use ormlite_core::insert::Insertion; 40 | pub use ormlite_core::join::JoinDescription; 41 | pub use sqlmo::query::{Values, Value}; 42 | pub use sqlmo::{Insert, Dialect, ToSql}; 43 | } 44 | 45 | #[cfg(feature = "postgres")] 46 | #[cfg_attr(docsrs, doc(cfg(feature = "postgres")))] 47 | pub mod postgres { 48 | pub use sqlx::postgres::*; 49 | } 50 | 51 | #[cfg(feature = "sqlite")] 52 | #[cfg_attr(docsrs, doc(cfg(feature = "sqlite")))] 53 | pub mod sqlite { 54 | pub use sqlx::sqlite::*; 55 | } 56 | -------------------------------------------------------------------------------- /ormlite/src/model.rs: -------------------------------------------------------------------------------- 1 | pub use ormlite_core::join::{Join, JoinMeta, Loadable}; 2 | pub use ormlite_core::model::*; 3 | pub use ormlite_macro::{FromRow, Insert, IntoArguments, Model, TableMeta}; 4 | pub use sqlx::IntoArguments; 5 | pub use sqlx_core::from_row::FromRow; 6 | -------------------------------------------------------------------------------- /ormlite/tests/incomplete_tests/multiple_databases/03-many-to-many.rs: -------------------------------------------------------------------------------- 1 | use ormlite::model::*; 2 | use ormlite::Connection; 3 | use uuid::Uuid; 4 | 5 | #[derive(Model)] 6 | pub struct Person { 7 | id: Uuid, 8 | name: String, 9 | age: u8, 10 | } 11 | 12 | 13 | pub static CREATE_TABLE_SQL: &str = 14 | "CREATE TABLE person (id text PRIMARY KEY, name TEXT, age INTEGER)"; 15 | 16 | #[tokio::main] 17 | async fn main() -> Result<(), Box<dyn std::error::Error>> { 18 | // env_logger::init(); 19 | // let mut db = ormlite::sqlite::SqliteConnection::connect(":memory:").await.unwrap(); 20 | // ormlite::query(CREATE_TABLE_SQL) 21 | // .execute(&mut db) 22 | // .await?; 23 | // 24 | // let p = Person { 25 | // id: Uuid::new_v4(), 26 | // name: "John".to_string(), 27 | // age: 99, 28 | // }.insert(&mut db).await?; 29 | // 30 | // let p = p.update_partial() 31 | // .age(100) 32 | // .update(&mut db) 33 | // .await?; 34 | // 35 | // assert_eq!(p.age, 100); 36 | Ok(()) 37 | } 38 | 
-------------------------------------------------------------------------------- /ormlite/tests/incomplete_tests/multiple_databases/04-one-to-many.rs: -------------------------------------------------------------------------------- 1 | use ormlite::model::*; 2 | use ormlite::Connection; 3 | use uuid::Uuid; 4 | 5 | #[derive(Model)] 6 | pub struct Person { 7 | id: Uuid, 8 | name: String, 9 | age: u8, 10 | } 11 | 12 | 13 | pub static CREATE_TABLE_SQL: &str = 14 | "CREATE TABLE person (id text PRIMARY KEY, name TEXT, age INTEGER)"; 15 | 16 | #[tokio::main] 17 | async fn main() -> Result<(), Box<dyn std::error::Error>> { 18 | env_logger::init(); 19 | // let mut db = ormlite::sqlite::SqliteConnection::connect(":memory:").await.unwrap(); 20 | // ormlite::query(CREATE_TABLE_SQL) 21 | // .execute(&mut db) 22 | // .await?; 23 | // 24 | // let p = Person { 25 | // id: Uuid::new_v4(), 26 | // name: "John".to_string(), 27 | // age: 99, 28 | // }.insert(&mut db).await?; 29 | // 30 | // let p = p.update_partial() 31 | // .age(100) 32 | // .update(&mut db) 33 | // .await?; 34 | // 35 | // assert_eq!(p.age, 100); 36 | Ok(()) 37 | } 38 | -------------------------------------------------------------------------------- /ormlite/tests/incomplete_tests/multiple_databases/main.rs: -------------------------------------------------------------------------------- 1 | #[path = "../setup.rs"] 2 | mod setup; 3 | 4 | use ormlite::model::*; 5 | use ormlite::Connection; 6 | use sqlmo::ToSql; 7 | use uuid::Uuid; 8 | 9 | #[derive(Model)] 10 | #[ormlite(database = "sqlite")] 11 | #[ormlite(database = "postgres")] 12 | pub struct Person { 13 | id: Uuid, 14 | name: String, 15 | age: u8, 16 | } 17 | 18 | 19 | #[tokio::main] 20 | async fn main() -> Result<(), Box<dyn std::error::Error>> { 21 | env_logger::init(); 22 | let mut db = ormlite::sqlite::SqliteConnection::connect(":memory:").await.unwrap(); 23 | let migration = crate::setup::migrate_self(&[file!()]); 24 | for s in migration.statements { 25 | let sql = s.to_sql(sqlmo::Dialect::Sqlite); 26 | ormlite::query(&sql) 27 | .execute(&mut db) 28 | .await?; 29 | } 30 | 31 | let p = Person { 32 | id: Uuid::new_v4(), 33 | name: "John".to_string(), 34 | age: 99, 35 | }.insert(&mut db).await?; 36 | 37 | let p = p.update_partial() 38 | .age(100) 39 | .update(&mut db) 40 | .await?; 41 | 42 | assert_eq!(p.age, 100); 43 | Ok(()) 44 | } 45 | -------------------------------------------------------------------------------- /ormlite/tests/incomplete_tests/multiple_databases/multiple-databases.rs: -------------------------------------------------------------------------------- 1 | /// Note this is work in progress and not working yet... 
2 | #[path = "run.rs"] 3 | mod run; 4 | 5 | use trybuild::TestCases; 6 | 7 | #[test] 8 | fn test_multifile() { 9 | run::set_dir_and_run("tests/multiple_databases", "main.rs"); 10 | } -------------------------------------------------------------------------------- /ormlite/tests/multifile/main.rs: -------------------------------------------------------------------------------- 1 | #[path = "../setup.rs"] 2 | mod setup; 3 | 4 | mod user; 5 | mod organization; 6 | 7 | pub use user::User; 8 | pub use organization::Organization; 9 | use uuid::Uuid; 10 | use ormlite::model::*; 11 | use ormlite::sqlite::SqliteConnection; 12 | use ormlite::Connection; 13 | use sqlmo::ToSql; 14 | 15 | #[tokio::main] 16 | async fn main() -> Result<(), Box<dyn std::error::Error>> { 17 | let mut conn = SqliteConnection::connect(":memory:").await?; 18 | let migration = setup::migrate_self(&[ 19 | &std::path::Path::new(file!()).parent().unwrap().display().to_string(), 20 | ]); 21 | for s in migration.statements { 22 | let sql = s.to_sql(sqlmo::Dialect::Sqlite); 23 | ormlite::query(&sql) 24 | .execute(&mut conn) 25 | .await?; 26 | } 27 | 28 | let org_id = Uuid::new_v4(); 29 | let org = Organization { 30 | id: org_id, 31 | name: "Acme".to_string(), 32 | is_active: true, 33 | }; 34 | let user = User { 35 | id: Uuid::new_v4(), 36 | name: "John".to_string(), 37 | age: 99, 38 | organization: Join::new(org), 39 | }; 40 | let user = user.insert(&mut conn).await?; 41 | assert_eq!(user.organization.id, org_id); 42 | Ok(()) 43 | } -------------------------------------------------------------------------------- /ormlite/tests/multifile/organization.rs: -------------------------------------------------------------------------------- 1 | use ormlite::types::Uuid; 2 | use ormlite::model::*; 3 | 4 | #[derive(Debug, Model)] 5 | pub struct Organization { 6 | pub id: Uuid, 7 | pub name: String, 8 | pub is_active: bool, 9 | } -------------------------------------------------------------------------------- /ormlite/tests/multifile/user.rs: -------------------------------------------------------------------------------- 1 | use crate::organization::Organization; 2 | use ormlite::model::*; 3 | use ormlite::types::Uuid; 4 | 5 | #[derive(Debug, Model)] 6 | pub struct User { 7 | pub id: Uuid, 8 | pub name: String, 9 | pub age: u8, 10 | #[ormlite(column = "organization_id")] 11 | pub organization: Join<Organization>, 12 | } 13 | -------------------------------------------------------------------------------- /ormlite/tests/postgres.rs: -------------------------------------------------------------------------------- 1 | #![cfg(not(any(feature = "sqlite", feature = "mysql")))] 2 | #[path = "./run.rs"] 3 | mod run; 4 | 5 | use run::*; 6 | 7 | #[test] 8 | fn test_postgres_complex() { 9 | set_path_and_run("tests/postgres/complex.rs"); 10 | } 11 | 12 | // #[test] 13 | // fn test_postgres_join() { 14 | // set_path_and_run("tests/postgres/join.rs"); 15 | // } 16 | -------------------------------------------------------------------------------- /ormlite/tests/postgres/complex.rs: -------------------------------------------------------------------------------- 1 | use chrono::{DateTime, Utc}; 2 | use ormlite::model::*; 3 | use ormlite::types::Json; 4 | use uuid::Uuid; 5 | use serde::{Serialize, Deserialize}; 6 | 7 | #[derive(sqlx::Type)] 8 | #[repr(i8)] 9 | enum JobType { 10 | All = 1, 11 | Any = 2, 12 | } 13 | 14 | #[derive(Serialize, Deserialize)] 15 | struct JobData { 16 | count: i32, 17 | value: String, 18 | timestamp: DateTime<Utc>, 19 | } 20 | 21 | 22 | #[derive(Model)] 23 | struct Job { 24
| id: i32, 25 | typ: JobType, 26 | name: String, 27 | data: Json<JobData>, 28 | #[ormlite(json)] 29 | data2: JobData, 30 | #[allow(dead_code)] 31 | #[ormlite(skip)] 32 | skipped: Option<Uuid>, 33 | } 34 | 35 | #[derive(IntoArguments)] 36 | struct ApiJob { 37 | id: i32, 38 | typ: JobType, 39 | name: String, 40 | #[ormlite(json)] 41 | data: JobData, 42 | } 43 | 44 | #[tokio::main] 45 | async fn main() { 46 | assert_eq!(Job::table_columns(), vec![ 47 | "id".to_string(), 48 | "typ".to_string(), 49 | "name".to_string(), 50 | "data".to_string(), 51 | "data2".to_string(), 52 | ]); 53 | } -------------------------------------------------------------------------------- /ormlite/tests/postgres/join.rs: -------------------------------------------------------------------------------- 1 | use ormlite::model::*; 2 | use ormlite::{query, Connection}; 3 | use sqlmo::{Dialect, ToSql}; 4 | use uuid::Uuid; 5 | 6 | #[path = "../setup.rs"] 7 | mod setup; 8 | 9 | #[derive(Model)] 10 | #[ormlite(table = "users")] 11 | struct User { 12 | id: Uuid, 13 | name: String, 14 | #[ormlite(column = "org_id")] 15 | org: Join<Organization>, 16 | 17 | #[ormlite(column = "subscription_id")] 18 | subscription: Join<Option<Subscription>>, 19 | 20 | // #[ormlite(foreign_field = Photo::user_id, sort = "id")] 21 | #[ormlite(foreign_field = Photo::user_id)] 22 | photos: Join<Vec<Photo>>, 23 | } 24 | 25 | #[derive(Model)] 26 | struct Organization { 27 | id: Uuid, 28 | name: String, 29 | } 30 | 31 | #[derive(Model)] 32 | struct Subscription { 33 | id: Uuid, 34 | name: String, 35 | } 36 | 37 | #[derive(Model)] 38 | struct Photo { 39 | id: Uuid, 40 | user_id: Uuid, 41 | name: String, 42 | } 43 | 44 | #[tokio::main] 45 | async fn main() { 46 | env_logger::init(); 47 | let url = std::env::var("DATABASE_URL").unwrap(); 48 | let mut conn = ormlite::postgres::PgConnection::connect(&url).await.unwrap(); 49 | query("drop table if exists users, organization, photo, subscription") 50 | .execute(&mut conn) 51 | .await 52 | .unwrap(); 53 | let migration = setup::migrate_self(&[file!()]); 54 | for s in migration.statements { 55 | let sql = s.to_sql(Dialect::Postgres); 56 | query(&sql).execute(&mut conn).await.unwrap(); 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /ormlite/tests/run.rs: -------------------------------------------------------------------------------- 1 | #![allow(dead_code)] 2 | /// This is a utils mod for the test suite. 3 | /// Import it from a test like so: 4 | /// ``` 5 | /// #[path = "run.rs"] 6 | /// mod run; 7 | use trybuild::TestCases; 8 | 9 | const FOO: &str = env!("CARGO_MANIFEST_DIR"); 10 | 11 | pub fn set_path_and_run(path: &str) { 12 | let t = TestCases::new(); 13 | let p = std::path::Path::new(&FOO).join(path); 14 | std::env::set_var("MODEL_FOLDERS", p.display().to_string()); 15 | t.pass(path); 16 | } 17 | 18 | // Use if we have models across a directory, and MODEL_FOLDERS needs to be set. 19 | pub fn set_dir_and_run(dir: &str, subpath: &str) { 20 | let t = TestCases::new(); 21 | let p = std::path::Path::new(&FOO).join(dir); 22 | std::env::set_var("MODEL_FOLDERS", p.display().to_string()); 23 | t.pass(p.join(subpath).display().to_string()); 24 | } 25 | -------------------------------------------------------------------------------- /ormlite/tests/setup.rs: -------------------------------------------------------------------------------- 1 | /// This is a helper function to run migrations for tests. 2 | /// Import it from within a buildable test (i.e. 
a test built by trybuild) like so: 3 | /// ``` 4 | /// #[path = "setup.rs"] 5 | /// mod setup; 6 | /// 7 | /// 8 | 9 | #[allow(dead_code)] 10 | pub fn migrate_self(files: &[&str]) -> sqlmo::Migration { 11 | use ormlite_core::schema::schema_from_ormlite_project; 12 | let paths = files.iter().map(std::path::Path::new).collect::<Vec<_>>(); 13 | let cfg = ormlite_core::config::Config::default(); 14 | let schema: sqlmo::Schema = schema_from_ormlite_project(&paths, &cfg).unwrap(); 15 | let opt = sqlmo::MigrationOptions::default(); 16 | let migration = sqlmo::Schema::default().migrate_to(schema, &opt).unwrap(); 17 | migration 18 | } 19 | -------------------------------------------------------------------------------- /ormlite/tests/simple.rs: -------------------------------------------------------------------------------- 1 | use ormlite::model::*; 2 | use ormlite::Connection; 3 | 4 | #[derive(Model, Debug)] 5 | #[ormlite(insertable = InsertPerson)] 6 | // #[index(col, col2, col3, unique = true, name = "my_index", type="btree")] 7 | pub struct Person { 8 | pub id: i32, 9 | pub name: String, 10 | pub age: i16, 11 | } 12 | 13 | pub static CREATE_TABLE_SQL: &str = "CREATE TABLE person (id INTEGER PRIMARY KEY, name TEXT, age INTEGER)"; 14 | 15 | #[tokio::test] 16 | async fn main() -> Result<(), Box<dyn std::error::Error>> { 17 | let mut conn = ormlite::sqlite::SqliteConnection::connect(":memory:").await.unwrap(); 18 | env_logger::init(); 19 | 20 | ormlite::query(CREATE_TABLE_SQL).execute(&mut conn).await?; 21 | 22 | // You can insert the model directly. 23 | let mut john = Person { 24 | id: 1, 25 | name: "John".to_string(), 26 | age: 99, 27 | } 28 | .insert(&mut conn) 29 | .await?; 30 | println!("{:?}", john); 31 | 32 | println!("select"); 33 | let people = Person::select().where_bind("age > ?", 50).fetch_all(&mut conn).await?; 34 | println!("select query builder {:?}", people); 35 | 36 | let r = sqlx::query_as::<_, Person>("select * from person where age > ?") 37 | .bind(50) 38 | .fetch_all(&mut conn) 39 | .await?; 40 | println!("sqlx {:?}", r); 41 | 42 | // After modifying the object, you can update all fields directly. 43 | john.age = john.age + 1; 44 | john = john.update_all_fields(&mut conn).await?; 45 | println!("{:?}", john); 46 | 47 | // Lastly, you can delete the object. 48 | john.delete(&mut conn).await?; 49 | // You can get a single user. 50 | Person::fetch_one(1, &mut conn).await.expect_err("Should not exist"); 51 | 52 | Person { 53 | id: 1, 54 | name: "Dan".to_string(), 55 | age: 28, 56 | } 57 | .insert(&mut conn) 58 | .await?; 59 | 60 | let dan = Person::fetch_one(1, &mut conn).await?; 61 | println!("get_one {:?}", dan); 62 | 63 | let dan2 = dan.update_partial().age(29).update(&mut conn).await?; 64 | println!("dan1 {:?}", dan); 65 | println!("dan2 {:?}", dan2); 66 | 67 | InsertPerson { 68 | name: "Albert Einstein".to_string(), 69 | age: 60, 70 | } 71 | .insert(&mut conn) 72 | .await?; 73 | 74 | let kurt = Person::builder() 75 | .name("Kurt".to_string()) 76 | .age(29) 77 | .insert(&mut conn) 78 | .await?; 79 | println!("built {:?}", kurt); 80 | // // You can create a query builder. 
81 | let people = Person::select().where_("age > ?").bind(50).fetch_all(&mut conn).await?; 82 | println!("select builder {:?}", people); 83 | 84 | let people = Person::query("SELECT * FROM person WHERE age > ?") 85 | .bind(20) 86 | .fetch_all(&mut conn) 87 | .await?; 88 | println!("raw query: {:?}", people); 89 | Ok(()) 90 | } 91 | -------------------------------------------------------------------------------- /ormlite/tests/sqlite.rs: -------------------------------------------------------------------------------- 1 | #![cfg(not(any(feature = "postgres", feature = "mysql")))] 2 | #[path = "./run.rs"] 3 | mod run; 4 | 5 | use run::*; 6 | 7 | #[test] 8 | fn test_sqlite() { 9 | set_path_and_run("tests/sqlite/01-table-meta.rs"); 10 | set_path_and_run("tests/sqlite/02-update-partial.rs"); 11 | set_path_and_run("tests/sqlite/03-many-to-one-join.rs"); 12 | set_path_and_run("tests/sqlite/04-allow-clone-primary-key.rs"); 13 | set_path_and_run("tests/sqlite/05-keyword-column.rs"); 14 | set_path_and_run("tests/sqlite/06-insert.rs"); 15 | // t.pass("tests/03-many-to-many.rs"); 16 | // t.pass("tests/04-one-to-many.rs"); 17 | } 18 | 19 | #[test] 20 | fn test_multifile() { 21 | set_dir_and_run("tests/multifile", "main.rs"); 22 | } 23 | -------------------------------------------------------------------------------- /ormlite/tests/sqlite/01-table-meta.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused)] 2 | use uuid::Uuid; 3 | use ormlite::TableMeta; 4 | use serde::Serialize; 5 | 6 | #[derive(TableMeta)] 7 | pub struct Person { 8 | id: Uuid, 9 | name: String, 10 | age: u8, 11 | } 12 | 13 | #[derive(TableMeta, Serialize)] 14 | pub struct Person2 { 15 | id: Uuid, 16 | #[ormlite(column = "old")] 17 | age: u8, 18 | } 19 | 20 | fn main() { 21 | assert_eq!(Person::table_name(), "person"); 22 | assert_eq!(Person::table_columns(), &["id", "name", "age"]); 23 | assert_eq!(Person::primary_key(), Some("id")); 24 | 25 | assert_eq!(Person2::table_columns(), &["id", "old"]); 26 | } 27 | -------------------------------------------------------------------------------- /ormlite/tests/sqlite/02-update-partial.rs: -------------------------------------------------------------------------------- 1 | #[path = "../setup.rs"] 2 | mod setup; 3 | 4 | use ormlite::model::*; 5 | use ormlite::Connection; 6 | use sqlmo::ToSql; 7 | use uuid::Uuid; 8 | 9 | #[derive(Model)] 10 | pub struct Person { 11 | id: Uuid, 12 | name: String, 13 | age: u8, 14 | } 15 | 16 | 17 | #[tokio::main] 18 | async fn main() { 19 | let mut db = ormlite::sqlite::SqliteConnection::connect(":memory:") 20 | .await 21 | .unwrap(); 22 | let migration = setup::migrate_self(&[file!()]); 23 | for s in migration.statements { 24 | let sql = s.to_sql(sqlmo::Dialect::Sqlite); 25 | ormlite::query(&sql) 26 | .execute(&mut db) 27 | .await 28 | .unwrap(); 29 | } 30 | 31 | let p = Person { 32 | id: Uuid::new_v4(), 33 | name: "John".to_string(), 34 | age: 99, 35 | }.insert(&mut db) 36 | .await 37 | .unwrap(); 38 | 39 | let p = p.update_partial() 40 | .age(100) 41 | .update(&mut db) 42 | .await 43 | .unwrap(); 44 | 45 | assert_eq!(p.age, 100); 46 | } 47 | -------------------------------------------------------------------------------- /ormlite/tests/sqlite/03-many-to-one-join.rs: -------------------------------------------------------------------------------- 1 | use ormlite::model::*; 2 | use ormlite::Connection; 3 | use uuid::Uuid; 4 | 5 | #[derive(Model, Debug)] 6 | pub struct Person { 7 | id: Uuid, 8 | name: String, 9 | 
age: u8, 10 | #[ormlite(column = "org_id")] 11 | organization: Join<Organization>, 12 | } 13 | 14 | #[derive(Model, Clone, Debug)] 15 | #[ormlite(table = "orgs")] 16 | pub struct Organization { 17 | id: Uuid, 18 | name: String, 19 | } 20 | 21 | pub static CREATE_PERSON_SQL: &str = "CREATE TABLE person (id text PRIMARY KEY, name TEXT, age INTEGER, org_id text)"; 22 | 23 | pub static CREATE_ORG_SQL: &str = "CREATE TABLE orgs (id text PRIMARY KEY, name TEXT)"; 24 | 25 | #[tokio::main] 26 | async fn main() -> Result<(), Box<dyn std::error::Error>> { 27 | env_logger::init(); 28 | let mut db = ormlite::sqlite::SqliteConnection::connect(":memory:").await.unwrap(); 29 | ormlite::query(CREATE_PERSON_SQL).execute(&mut db).await?; 30 | ormlite::query(CREATE_ORG_SQL).execute(&mut db).await?; 31 | 32 | let org = Organization { 33 | id: Uuid::new_v4(), 34 | name: "my org".to_string(), 35 | }; 36 | let p1 = Person { 37 | id: Uuid::new_v4(), 38 | name: "John".to_string(), 39 | age: 102, 40 | organization: Join::new(org.clone()), 41 | } 42 | .insert(&mut db) 43 | .await 44 | .unwrap(); 45 | assert_eq!( 46 | p1.organization.id, org.id, 47 | "setting the org object should overwrite the org_id field on insert." 48 | ); 49 | assert_eq!(p1.organization._id(), org.id); 50 | 51 | let org = Organization::select() 52 | .where_bind("id = ?", &org.id) 53 | .fetch_one(&mut db) 54 | .await 55 | .unwrap(); 56 | assert_eq!( 57 | org.name, "my org", 58 | "org gets inserted even though we didn't manually insert it." 59 | ); 60 | 61 | let p2 = Person { 62 | id: Uuid::new_v4(), 63 | name: "p2".to_string(), 64 | age: 98, 65 | organization: Join::new(org.clone()), 66 | } 67 | .insert(&mut db) 68 | .await 69 | .unwrap(); 70 | assert_eq!( 71 | p2.organization.id, org.id, 72 | "we can do insertion with an existing join obj, and it will pass the error." 
73 | ); 74 | 75 | let orgs = Organization::select().fetch_all(&mut db).await.unwrap(); 76 | assert_eq!(orgs.len(), 1, "exactly 1 orgs"); 77 | 78 | let people = Person::select().fetch_all(&mut db).await.unwrap(); 79 | assert_eq!(people.len(), 2, "exactly 2 people"); 80 | 81 | let people = Person::select() 82 | .join(Person::organization()) 83 | .fetch_all(&mut db) 84 | .await 85 | .unwrap(); 86 | assert_eq!(people.len(), 2, "exactly 2 people"); 87 | for person in &people { 88 | assert_eq!(person.organization.name, "my org", "we can join on the org"); 89 | } 90 | Ok(()) 91 | } 92 | -------------------------------------------------------------------------------- /ormlite/tests/sqlite/04-allow-clone-primary-key.rs: -------------------------------------------------------------------------------- 1 | use ormlite::model::*; 2 | use ormlite::Connection; 3 | use uuid::Uuid; 4 | 5 | #[derive(Model)] 6 | pub struct Person { 7 | id: Uuid, 8 | #[ormlite(primary_key)] 9 | name: String, 10 | age: u8, 11 | } 12 | 13 | 14 | pub static CREATE_TABLE_SQL: &str = 15 | "CREATE TABLE person (id text PRIMARY KEY, name TEXT, age INTEGER)"; 16 | 17 | #[tokio::main] 18 | async fn main() -> Result<(), Box<dyn std::error::Error>> { 19 | env_logger::init(); 20 | let mut db = ormlite::sqlite::SqliteConnection::connect(":memory:").await.unwrap(); 21 | ormlite::query(CREATE_TABLE_SQL) 22 | .execute(&mut db) 23 | .await?; 24 | 25 | let p = Person { 26 | id: Uuid::new_v4(), 27 | name: "John".to_string(), 28 | age: 99, 29 | }.insert(&mut db).await?; 30 | 31 | let p = p.update_partial() 32 | .age(100) 33 | .update(&mut db) 34 | .await?; 35 | 36 | assert_eq!(p.age, 100); 37 | 38 | Ok(()) 39 | } 40 | -------------------------------------------------------------------------------- /ormlite/tests/sqlite/05-keyword-column.rs: -------------------------------------------------------------------------------- 1 | use ormlite::Model; 2 | use ormlite::TableMeta; 3 | use sqlmo::ToSql; 4 | 5 | use ormlite::Connection; 6 | #[path = "../setup.rs"] 7 | mod setup; 8 | 9 | #[derive(Model)] 10 | pub struct User { 11 | id: i32, 12 | #[ormlite(column = "type")] 13 | typ: String, 14 | } 15 | 16 | #[tokio::main] 17 | async fn main() { 18 | let mut db = ormlite::sqlite::SqliteConnection::connect(":memory:") 19 | .await 20 | .unwrap(); 21 | let migration = setup::migrate_self(&[file!()]); 22 | for s in migration.statements { 23 | let sql = s.to_sql(sqlmo::Dialect::Sqlite); 24 | ormlite::query(&sql) 25 | .execute(&mut db) 26 | .await 27 | .unwrap(); 28 | } 29 | 30 | User { 31 | id: 1, 32 | typ: "admin".to_string(), 33 | }.insert(&mut db) 34 | .await 35 | .unwrap(); 36 | 37 | let users = User::select() 38 | .fetch_all(&mut db) 39 | .await 40 | .unwrap(); 41 | 42 | assert_eq!(User::table_name(), "user"); 43 | assert_eq!(User::table_columns(), &["id", "type"]); 44 | 45 | assert_eq!(users.len(), 1); 46 | assert_eq!(users[0].typ, "admin"); 47 | } -------------------------------------------------------------------------------- /ormlite/tests/sqlite/06-insert.rs: -------------------------------------------------------------------------------- 1 | use ormlite::model::{Insert, Join, JoinMeta, Model}; 2 | use serde::{Deserialize, Serialize}; 3 | use serde_json::json; 4 | use sqlmo::ToSql; 5 | 6 | use ormlite::Connection; 7 | #[path = "../setup.rs"] 8 | mod setup; 9 | 10 | #[derive(Debug, Model, Clone, Serialize, Deserialize)] 11 | pub struct Organization { 12 | id: i32, 13 | name: String, 14 | } 15 | 16 | #[derive(Model)] 17 | #[ormlite(insert = "InsertUser", extra_derives(Serialize, 
Deserialize))] 18 | // Note the previous syntax, #[ormlite(insertable = InsertUser)] still works, but the new syntax is preferred. 19 | pub struct User { 20 | id: i32, 21 | name: String, 22 | #[ormlite(default)] 23 | secret: Option<String>, 24 | #[ormlite(default_value = "5")] 25 | number: i32, 26 | #[ormlite(column = "type")] 27 | ty: i32, 28 | #[ormlite(column = "org_id")] 29 | organization: Join<Organization>, 30 | } 31 | 32 | #[derive(Insert)] 33 | #[ormlite(returns = "User")] 34 | pub struct InsertUser2 { 35 | name: String, 36 | number: i32, 37 | #[ormlite(column = "type")] 38 | ty: i32, 39 | org_id: i32, 40 | } 41 | 42 | #[tokio::main] 43 | async fn main() { 44 | env_logger::init(); 45 | let mut db = ormlite::sqlite::SqliteConnection::connect(":memory:").await.unwrap(); 46 | let migration = setup::migrate_self(&[file!()]); 47 | for s in migration.statements { 48 | let sql = s.to_sql(sqlmo::Dialect::Sqlite); 49 | ormlite::query(&sql).execute(&mut db).await.unwrap(); 50 | } 51 | 52 | let org = Organization { 53 | id: 12321, 54 | name: "my org".to_string(), 55 | }; 56 | 57 | let champ = InsertUser { 58 | name: "Champ".to_string(), 59 | organization: Join::new(org.clone()), 60 | ty: 12, 61 | } 62 | .insert(&mut db) 63 | .await 64 | .unwrap(); 65 | 66 | assert_eq!(champ.id, 1); 67 | assert_eq!(champ.secret, None); 68 | assert_eq!(champ.number, 5); 69 | assert_eq!(champ.organization.id, 12321); 70 | assert_eq!(champ.organization.name, "my org"); 71 | 72 | let champ_copy = InsertUser { 73 | name: "Champ".to_string(), 74 | organization: Join::new(org.clone()), 75 | ty: 12, 76 | }; 77 | let champ_json = json!(champ_copy).to_string(); 78 | 79 | assert_eq!( 80 | champ_json, 81 | r#"{"name":"Champ","organization":{"id":12321,"name":"my org"},"ty":12}"# 82 | ); 83 | 84 | let champ_deserializing = 85 | serde_json::from_str::<InsertUser>(r#"{"name":"Champ","organization":{"id":12321,"name":"my org"},"ty":12}"#); 86 | 87 | let Ok(champ_deserialized) = champ_deserializing else { 88 | panic!("Deserialize failing"); 89 | }; 90 | 91 | assert_eq!(champ_deserialized.name, champ_copy.name); 92 | assert_eq!(champ_deserialized.organization.name, champ_copy.organization.name); 93 | 94 | let millie = InsertUser { 95 | name: "Millie".to_string(), 96 | organization: Join::new(org), 97 | ty: 3, 98 | } 99 | .insert(&mut db) 100 | .await 101 | .unwrap(); 102 | assert_eq!(millie.id, 2); 103 | assert_eq!(millie.secret, None); 104 | assert_eq!(millie.number, 5); 105 | assert_eq!(millie.organization.id, 12321); 106 | assert_eq!(millie.organization.name, "my org"); 107 | 108 | let enoki = InsertUser { 109 | name: "Enoki".to_string(), 110 | organization: Join::new_with_id(12321), 111 | ty: 6, 112 | } 113 | .insert(&mut db) 114 | .await 115 | .unwrap(); 116 | assert_eq!(enoki.id, 3); 117 | assert_eq!(enoki.secret, None); 118 | assert_eq!(enoki.number, 5); 119 | assert_eq!(enoki.organization.id, 12321); 120 | assert_eq!(enoki.organization.loaded(), false); 121 | 122 | let user = InsertUser2 { 123 | name: "Kloud".to_string(), 124 | number: 12, 125 | ty: 8, 126 | org_id: 12321, 127 | } 128 | .insert(&mut db) 129 | .await 130 | .unwrap(); 131 | assert_eq!(user.id, 4); 132 | assert_eq!(user.name, "Kloud"); 133 | let user = User::fetch_one(4, &mut db).await.unwrap(); 134 | assert_eq!(user.id, 4); 135 | assert_eq!(user.name, "Kloud"); 136 | assert_eq!(user.ty, 8); 137 | assert_eq!(user.organization.id, 12321); 138 | 139 | let orgs = vec![ 140 | Organization { 141 | id: 1, 142 | name: "bulk-org1".to_string(), 143 | }, 144 | Organization { 145 | id: 2, 146 | name: 
"bulk-org2".to_string(), 147 | }, 148 | Organization { 149 | id: 3, 150 | name: "bulk-org3".to_string(), 151 | }, 152 | ]; 153 | let orgs = Organization::insert_many(orgs, &mut db).await.unwrap(); 154 | assert_eq!(orgs.len(), 3); 155 | } 156 | --------------------------------------------------------------------------------