├── run_clippy.sh ├── .gitignore ├── generate_readme.sh ├── test_pg.sh ├── install-postgresql-client.sh ├── create_sqlite_sakila.sh ├── test.sh ├── update_readme.sh ├── crates ├── dao │ ├── src │ │ ├── common.rs │ │ ├── error.rs │ │ ├── interval.rs │ │ ├── lib.rs │ │ ├── column_name.rs │ │ ├── table_name.rs │ │ ├── rows.rs │ │ ├── dao.rs │ │ └── value.rs │ └── Cargo.toml └── codegen │ ├── Cargo.toml │ └── src │ ├── table_derive.rs │ ├── lib.rs │ ├── column_derive.rs │ └── dao_derive.rs ├── TODO.md ├── bootstrap-test.sh ├── gather-prerequisites.sh ├── src ├── util.rs ├── db_auth │ ├── mod.rs │ └── previlege.rs ├── database.rs ├── lib.rs ├── dao_manager.rs ├── pg │ ├── interval.rs │ ├── numeric.rs │ └── table_info.rs ├── platform.rs ├── types.rs ├── column.rs ├── error.rs ├── common.rs ├── table.rs ├── entity.rs ├── pool.rs └── my │ └── mod.rs ├── execute-data-import.sh ├── examples ├── show_tables_mysql.rs ├── simple_select.rs ├── select_usage_sqlite.rs ├── select_usage.rs ├── update_usage_mysql.rs ├── derive_dao_usage.rs ├── select_usage_mysql.rs ├── insert_usage.rs ├── insert_usage_mysql.rs └── insert_to_sqlite.rs ├── .travis.yml ├── import-sakila-pg.sh ├── rustfmt.toml ├── rename-database-host.sh ├── .github ├── workflows │ └── rust.yml └── FUNDING.yml ├── .gitlab-ci.yml ├── LICENSE ├── Cargo.toml ├── Changelog.md └── README.md /run_clippy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -v 3 | 4 | cargo clippy --all-features 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target/ 2 | **/target/ 3 | **/*.rs.bk 4 | sakila.db 5 | Cargo.lock 6 | -------------------------------------------------------------------------------- /generate_readme.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -v 3 | 4 | cargo readme > 
README.md 5 | -------------------------------------------------------------------------------- /test_pg.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -v 3 | 4 | cargo test --features "with-postgres" 5 | -------------------------------------------------------------------------------- /install-postgresql-client.sh: -------------------------------------------------------------------------------- 1 | 2 | ## update and install postgresql client 3 | apt update 4 | apt install -y postgresql-client 5 | -------------------------------------------------------------------------------- /create_sqlite_sakila.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | rm sakila.db 4 | sqlite3 sakila.db < ../sakila/sqlite-sakila-db/sqlite-sakila-schema.sql 5 | sqlite3 sakila.db < ../sakila/sqlite-sakila-db/sqlite-sakila-insert-data.sql 6 | -------------------------------------------------------------------------------- /test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -v 3 | 4 | cargo test --features "with-postgres with-sqlite" 5 | cargo test --features "with-postgres" 6 | cargo test --features "with-sqlite" 7 | cargo test --features "with-mysql" 8 | -------------------------------------------------------------------------------- /update_readme.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | 5 | # Install cargo-readme if it isn't installed yet 6 | if ! 
type cargo-readme > /dev/null; then 7 | cargo install cargo-readme 8 | fi 9 | 10 | cargo readme > README.md 11 | 12 | -------------------------------------------------------------------------------- /crates/dao/src/common.rs: -------------------------------------------------------------------------------- 1 | fn is_keyword(s: &str) -> bool { 2 | let keywords = ["user", "role"]; 3 | keywords.contains(&s) 4 | } 5 | 6 | pub fn keywords_safe(s: &str) -> String { 7 | if is_keyword(s) { 8 | format!("\"{}\"", s) 9 | } else { 10 | s.to_string() 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /TODO.md: -------------------------------------------------------------------------------- 1 | # TODO 2 | - [ ] Implement the table_info extraction for mysql. 3 | - [ ] building the SQL statement should return the sql as string and the arg_value separately 4 | - this is an easy way to mitigate SQL injection 5 | 6 | - [X] **breaking** Rename Table to TableDef and Column to ColumnDef, it is a more appropriate name 7 | - [ ] Support for transactions 8 | -------------------------------------------------------------------------------- /bootstrap-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -v 3 | 4 | cd ../ && sh ./dbscripts/setup.sh && cd - 5 | 6 | pwd 7 | 8 | if ! type sqlite3 > /dev/null; then 9 | sudo apt install -y sqlite3 10 | fi 11 | 12 | rm sakila.db 13 | sqlite3 sakila.db < ../sakila/sqlite-sakila-db/sqlite-sakila-schema.sql 14 | sqlite3 sakila.db < ../sakila/sqlite-sakila-db/sqlite-sakila-insert-data.sql 15 | -------------------------------------------------------------------------------- /gather-prerequisites.sh: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Clone bazaar for the bazaar_v8 database 4 | cd .. 
&& git clone https://github.com/ivanceras/bazaar 5 | cd - 6 | 7 | ## Clone sakila for sakila sample database 8 | cd .. && git clone https://github.com/ivanceras/sakila 9 | cd - 10 | 11 | ### Clone data sql for dota-sql sample database 12 | cd .. && git clone https://github.com/ivanceras/dota-sql 13 | cd - 14 | 15 | -------------------------------------------------------------------------------- /crates/dao/src/error.rs: -------------------------------------------------------------------------------- 1 | use thiserror::Error; 2 | 3 | #[derive(Error, Debug)] 4 | pub enum ConvertError { 5 | #[error("Conversion not supported {0} to {1}")] 6 | NotSupported(String, String), 7 | } 8 | 9 | 10 | #[derive(Error, Debug)] 11 | pub enum DaoError { 12 | #[error("ConvertError {0}")] 13 | ConvertError(ConvertError), 14 | #[error("No such value {0}")] 15 | NoSuchValueError(String), 16 | } 17 | -------------------------------------------------------------------------------- /src/util.rs: -------------------------------------------------------------------------------- 1 | extern crate meval; 2 | 3 | fn trim_parenthesis(arg: &str) -> &str { 4 | arg.trim_start_matches('(').trim_end_matches(')') 5 | } 6 | 7 | pub fn maybe_trim_parenthesis(arg: &str) -> &str { 8 | if arg.starts_with('(') && arg.ends_with(')') { 9 | trim_parenthesis(arg) 10 | } else { 11 | arg 12 | } 13 | } 14 | 15 | pub fn eval_f64(expr: &str) -> Result { 16 | meval::eval_str(expr) 17 | } 18 | -------------------------------------------------------------------------------- /execute-data-import.sh: -------------------------------------------------------------------------------- 1 | 2 | 3 | ### run data import script from dato-sql 4 | cd ../dota-sql/data/ && sh reimport.sh 5 | cd - 6 | 7 | ### Run data import script from bazaar 8 | cd ../bazaar/scripts && sh setup.sh 9 | cd - 10 | 11 | ### Copy the pre-created sqlite sakila.db for testing rustorm "with-sqlite" feature 12 | cp ../sakila/sqlite-sakila-db/sakila.db . 
13 | 14 | 15 | ### Execute the import of sakila database for postgresql 16 | sh import-sakila-pg.sh 17 | -------------------------------------------------------------------------------- /examples/show_tables_mysql.rs: -------------------------------------------------------------------------------- 1 | use rustorm::{DbError, FromDao, Pool, Rows, ToColumnNames, ToTableName}; 2 | 3 | fn main() { 4 | let db_url = "mysql://root:r00t@localhost/sakila"; 5 | let mut pool = Pool::new(); 6 | let mut dm = pool 7 | .dm(db_url) 8 | .expect("Should be able to get a connection here.."); 9 | let sql = "SHOW TABLES"; 10 | let rows: Result = dm.execute_sql_with_return(sql, &[]); 11 | println!("rows: {:#?}", rows); 12 | } 13 | -------------------------------------------------------------------------------- /crates/dao/src/interval.rs: -------------------------------------------------------------------------------- 1 | use serde_derive::{ 2 | Deserialize, 3 | Serialize, 4 | }; 5 | 6 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 7 | pub struct Interval { 8 | pub microseconds: i64, 9 | pub days: i32, 10 | pub months: i32, 11 | } 12 | 13 | impl Interval { 14 | pub fn new(microseconds: i64, days: i32, months: i32) -> Self { 15 | Interval { 16 | microseconds, 17 | days, 18 | months, 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | os: 2 | - linux 3 | 4 | language: rust 5 | 6 | rust: 7 | - nightly 8 | - stable 9 | 10 | addons: 11 | postgresql: "9.5" 12 | apt: 13 | packages: 14 | - postgresql-client 15 | 16 | services: 17 | - postgresql 18 | - mysql 19 | 20 | before_script: 21 | - ./gather-prerequisites.sh 22 | - ./execute-data-import.sh 23 | 24 | 25 | 26 | script: 27 | - cargo test --features "with-postgres with-sqlite" 28 | - cargo test --features "with-postgres" 29 | - cargo test --features "with-sqlite" 30 | 
-------------------------------------------------------------------------------- /examples/simple_select.rs: -------------------------------------------------------------------------------- 1 | use rustorm::{DbError, Pool, Rows}; 2 | 3 | fn main() { 4 | let db_url = "postgres://postgres:p0stgr3s@localhost/sakila"; 5 | let mut pool = Pool::new(); 6 | let mut dm = pool.dm(db_url).unwrap(); 7 | let sql = "SELECT * FROM actor LIMIT 10"; 8 | let actors: Result = dm.execute_sql_with_return(sql, &[]); 9 | println!("Actor: {:#?}", actors); 10 | let actors = actors.unwrap(); 11 | assert_eq!(actors.iter().len(), 10); 12 | for actor in actors.iter() { 13 | println!("actor: {:?}", actor); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /crates/codegen/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rustorm_codegen" 3 | version = "0.18.0" 4 | authors = [ "Jovansonlee Cesar " ] 5 | license = "MIT" 6 | description = "code gen provides macro for generating code on data access objects on table metadata" 7 | repository = "https://github.com/ivanceras/rustorm" 8 | documentation = "https://docs.rs/rustorm" 9 | keywords = ["orm", "postgresql", "sqlite"] 10 | edition = "2018" 11 | 12 | [dependencies] 13 | syn = "0.11.11" 14 | quote = "0.3.15" 15 | rustorm_dao = { path = "../dao", version = "0.18.0" } 16 | 17 | 18 | [lib] 19 | proc-macro = true 20 | -------------------------------------------------------------------------------- /crates/codegen/src/table_derive.rs: -------------------------------------------------------------------------------- 1 | use quote; 2 | use syn; 3 | 4 | pub fn impl_to_table_name(ast: &syn::MacroInput) -> quote::Tokens { 5 | let name = &ast.ident; 6 | let generics = &ast.generics; 7 | 8 | quote! 
{ 9 | impl #generics rustorm_dao::ToTableName for #name #generics { 10 | fn to_table_name() -> rustorm_dao::TableName { 11 | rustorm_dao::TableName{ 12 | name: stringify!(#name).to_lowercase().into(), 13 | schema: None, 14 | alias: None, 15 | } 16 | } 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /import-sakila-pg.sh: -------------------------------------------------------------------------------- 1 | psql -U postgres -h localhost -c 'DROP DATABASE sakila;' 2 | psql -U postgres -h localhost -c 'CREATE DATABASE sakila;' 3 | psql -U postgres -h localhost -d sakila -f ../sakila/postgres-sakila-db/postgres-sakila-schema.sql 4 | psql -U postgres -h localhost -d sakila -f ../sakila/postgres-sakila-db/postgres-sakila-data.sql 5 | 6 | psql -U postgres -h localhost -d sakila -f ../sakila/postgres-sakila-db/add_users_table.sql 7 | psql -U postgres -h localhost -d sakila -f ../sakila/postgres-sakila-db/add_composite_film_actor_award_foreign_key.sql 8 | 9 | psql -U postgres -h localhost -c "ALTER USER postgres WITH PASSWORD 'p0stgr3s';" 10 | 11 | -------------------------------------------------------------------------------- /crates/dao/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![deny(warnings)] 2 | #![deny(clippy::all)] 3 | 4 | pub use column_name::{ 5 | ColumnName, 6 | ToColumnNames, 7 | }; 8 | pub use dao::{ 9 | Dao, 10 | FromDao, 11 | ToDao, 12 | }; 13 | pub use error::{ 14 | ConvertError, 15 | DaoError, 16 | }; 17 | pub use interval::Interval; 18 | pub use rows::Rows; 19 | pub use table_name::{ 20 | TableName, 21 | ToTableName, 22 | }; 23 | pub use value::{ 24 | Array, 25 | FromValue, 26 | ToValue, 27 | Value, 28 | }; 29 | 30 | mod column_name; 31 | mod common; 32 | mod dao; 33 | mod error; 34 | mod interval; 35 | mod rows; 36 | mod table_name; 37 | pub mod value; 38 | -------------------------------------------------------------------------------- 
/examples/select_usage_sqlite.rs: -------------------------------------------------------------------------------- 1 | use rustorm::{DbError, FromDao, Pool, ToColumnNames, ToTableName}; 2 | 3 | #[derive(Debug, FromDao, ToColumnNames, ToTableName)] 4 | struct Actor { 5 | actor_id: i32, 6 | first_name: String, 7 | } 8 | 9 | fn main() { 10 | let db_url = "sqlite://sakila.db"; 11 | let mut pool = Pool::new(); 12 | let mut em = pool.em(db_url).unwrap(); 13 | let sql = "SELECT * FROM actor LIMIT 10"; 14 | let actors: Result, DbError> = em.execute_sql_with_return(sql, &[]); 15 | println!("Actor: {:#?}", actors); 16 | let actors = actors.unwrap(); 17 | assert_eq!(actors.len(), 10); 18 | for actor in actors { 19 | println!("actor: {:?}", actor); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | # Use unstable features 2 | unstable_features = true 3 | 4 | ## columns size is 100 before wrapping 5 | max_width = 100 6 | 7 | ## reorder the imports, Default is true 8 | reorder_imports = true 9 | ## 10 | reorder_impl_items = true 11 | ## Merge the imports statements 12 | merge_imports = true 13 | ## I want to be able to add and delete unused imports easily 14 | imports_layout = "Vertical" 15 | ## Default value is false, yet clipy keeps nagging on this 16 | use_field_init_shorthand = true 17 | 18 | ## also format macro 19 | format_macro_matchers = true 20 | force_multiline_blocks = true 21 | 22 | overflow_delimited_expr = true 23 | fn_single_line = true 24 | ## reorder impl blocks 25 | blank_lines_upper_bound = 3 26 | -------------------------------------------------------------------------------- /rename-database-host.sh: -------------------------------------------------------------------------------- 1 | 2 | ## set the `localhost` to `postgres` databse url in dota-sql database scripts 3 | cd ../dota-sql/ 4 | find . 
-name '*.sh' -type f -exec sed -i 's/localhost/postgres/g' {} + 5 | cd - 6 | 7 | ## set the `localhost` to `postgres` databse url in bazaar database scripts 8 | cd ../bazaar/ 9 | find . -name '*.sh' -type f -exec sed -i 's/localhost/postgres/g' {} + 10 | cd - 11 | 12 | ## set the `localhost` to `postgres` databse url in script files in rustorm project 13 | find . -name '*.sh' -type f -exec sed -i 's/localhost/postgres/g' {} + 14 | 15 | ## set the `localhost` to `postgres` databse url in rustorm project files 16 | find . -name '*.rs' -type f -exec sed -i 's/localhost/postgres/g' {} + 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /examples/select_usage.rs: -------------------------------------------------------------------------------- 1 | use rustorm::{DbError, FromDao, Pool, ToColumnNames, ToTableName}; 2 | 3 | #[derive(Debug, FromDao, ToColumnNames, ToTableName)] 4 | struct Actor { 5 | actor_id: i32, 6 | first_name: String, 7 | } 8 | 9 | fn main() { 10 | let db_url = "postgres://postgres:p0stgr3s@localhost/sakila"; 11 | let mut pool = Pool::new(); 12 | let mut em = pool.em(db_url).unwrap(); 13 | let sql = "SELECT * FROM actor LIMIT 10"; 14 | let actors: Result, DbError> = em.execute_sql_with_return(sql, &[]); 15 | println!("Actor: {:#?}", actors); 16 | let actors = actors.unwrap(); 17 | assert_eq!(actors.len(), 10); 18 | for actor in actors { 19 | println!("actor: {:?}", actor); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /crates/dao/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rustorm_dao" 3 | version = "0.18.0" 4 | authors = [ "Jovansonlee Cesar " ] 5 | license = "MIT" 6 | description = "Dao provides a flexible way to access data from the database" 7 | repository = "https://github.com/ivanceras/rustorm" 8 | documentation = "https://docs.rs/rustorm" 9 | keywords = ["orm", 
"postgresql", "sqlite"] 10 | edition = "2018" 11 | 12 | [dependencies] 13 | chrono = {version = "0.4.0", features = ["serde"]} 14 | uuid = {version = "0.5.1", features = ["serde", "v4"]} 15 | serde = "1.0.15" 16 | serde_derive = "1.0.15" 17 | serde_json = "1.0.3" 18 | bigdecimal = {version = "0.0.14", features = ["serde"]} 19 | geo = "0.4" 20 | time = "0.1.14" 21 | thiserror = "1.0.3" 22 | base64 = "0.9" 23 | -------------------------------------------------------------------------------- /examples/update_usage_mysql.rs: -------------------------------------------------------------------------------- 1 | use rustorm::{DbError, FromDao, Pool, Rows, ToColumnNames, ToTableName}; 2 | 3 | /// Run using: 4 | /// ``` 5 | /// cargo run --example update_usage_mysql --features "with-mysql" 6 | /// ``` 7 | 8 | fn main() { 9 | let db_url = "mysql://root:r00tpwdh3r3@localhost/sakila"; 10 | let mut pool = Pool::new(); 11 | pool.ensure(db_url); 12 | let mut em = pool 13 | .em(db_url) 14 | .expect("Should be able to get a connection here.."); 15 | let sql = "UPDATE actor SET last_name = ? 
WHERE first_name = ?".to_string(); 16 | let rows: Result = em 17 | .db() 18 | .execute_sql_with_return(&sql, &[&"JONES".into(), &"TOM".into()]); 19 | println!("rows: {:#?}", rows); 20 | } 21 | -------------------------------------------------------------------------------- /.github/workflows/rust.yml: -------------------------------------------------------------------------------- 1 | name: Rust 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | 8 | runs-on: ubuntu-latest 9 | 10 | steps: 11 | - uses: actions/checkout@v1 12 | - name: Install dependencies 13 | run: sudo apt install -y sqlite3 libsqlite3-0 libsqlite3-dev 14 | - name: Checkout sakila 15 | run: | 16 | git clone https://github.com/ivanceras/sakila.git 17 | sqlite3 sakila.db < sakila/sqlite-sakila-db/sqlite-sakila-schema.sql 18 | sqlite3 sakila.db < sakila/sqlite-sakila-db/sqlite-sakila-insert-data.sql 19 | - name: Build 20 | run: cargo build --all --features "with-postgres with-sqlite" 21 | - name: Run tests 22 | # --all-features takes no argument list; use --features to enable the named features (matches the Build step) 23 | run: cargo test --all --features "with-postgres with-sqlite" -------------------------------------------------------------------------------- /examples/derive_dao_usage.rs: -------------------------------------------------------------------------------- 1 | use rustorm::{FromDao, ToDao, ToTableName}; 2 | 3 | #[derive(Debug, FromDao, ToDao, ToTableName)] 4 | struct User { 5 | id: i32, 6 | username: String, 7 | } 8 | 9 | fn main() { 10 | // imported here since we are using the trait methods 11 | // `to_dao` and `to_table_name` without 12 | // conflicting with the derive ToDao and ToTableName macro 13 | use rustorm::dao::{ToDao, ToTableName}; 14 | 15 | let user = User { 16 | id: 1, 17 | username: "ivanceras".to_string(), 18 | }; 19 | println!("user: {:#?}", user); 20 | let dao = user.to_dao(); 21 | println!("dao: {:#?}", dao); 22 | let table = User::to_table_name(); 23 | println!("table name: {}", table.name); 24 | println!("table: {:#?}", table); 25 | } 26 | 
-------------------------------------------------------------------------------- /src/db_auth/mod.rs: -------------------------------------------------------------------------------- 1 | use chrono::{DateTime, Utc}; 2 | use rustorm_codegen::FromDao; 3 | use serde::{Deserialize, Serialize}; 4 | 5 | #[allow(unused)] 6 | mod previlege; 7 | 8 | /// This is the user object mapped from pg_authid 9 | #[derive(Debug, Serialize, Deserialize, FromDao)] 10 | pub struct User { 11 | pub sysid: i32, 12 | pub username: String, 13 | pub is_superuser: bool, 14 | pub is_inherit: bool, 15 | pub can_create_db: bool, 16 | pub can_create_role: bool, 17 | pub can_login: bool, 18 | pub can_do_replication: bool, 19 | pub can_bypass_rls: bool, 20 | pub valid_until: Option>, 21 | pub conn_limit: Option, 22 | } 23 | 24 | #[derive(Debug, Serialize, Deserialize, FromDao)] 25 | pub struct Role { 26 | pub role_name: String, 27 | } 28 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | #github: ivanceras # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 4 | patreon: ivanceras # Replace with a single Patreon username 5 | open_collective: ivanceras # Replace with a single Open Collective username 6 | #ko_fi: # Replace with a single Ko-fi username 7 | #tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | #community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | #liberapay: # Replace with a single Liberapay username 10 | #issuehunt: # Replace with a single IssueHunt username 11 | #otechie: # Replace with a single Otechie username 12 | #custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 13 | -------------------------------------------------------------------------------- 
/examples/select_usage_mysql.rs: -------------------------------------------------------------------------------- 1 | use rustorm::{DbError, FromDao, Pool, ToColumnNames, ToTableName}; 2 | 3 | #[derive(Debug, FromDao, ToColumnNames, ToTableName)] 4 | struct Actor { 5 | actor_id: i32, 6 | first_name: String, 7 | } 8 | 9 | fn main() { 10 | let db_url = "mysql://root:r00t@localhost/sakila"; 11 | let mut pool = Pool::new(); 12 | let mut em = pool 13 | .em(db_url) 14 | .expect("Should be able to get a connection here.."); 15 | let sql = "SELECT * FROM actor LIMIT 10"; 16 | let actors: Result, DbError> = em.execute_sql_with_return(sql, &[]); 17 | println!("Actor: {:#?}", actors); 18 | let actors = actors.unwrap(); 19 | assert_eq!(actors.len(), 10); 20 | for actor in actors { 21 | println!("actor: {:?}", actor); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/db_auth/previlege.rs: -------------------------------------------------------------------------------- 1 | use crate::{db_auth::User, ColumnName, TableName}; 2 | 3 | /// User can have previlege to tables, to columns 4 | /// The table models can be filtered depending on how much 5 | /// and which columns it has privilege 6 | enum Privilege { 7 | Select, 8 | Insert, 9 | Update, 10 | Delete, 11 | Create, 12 | Drop, 13 | Truncate, 14 | Connect, 15 | Execute, 16 | } 17 | 18 | /// 19 | /// CREATE TABLE user_privilege( 20 | /// user_id int, 21 | /// schema text, 22 | /// table_name text, 23 | /// columns text[], -- if no column mentioned, then the user has priviledge to all of the table columns 24 | /// privilege text[], 25 | /// ) 26 | /// User privileges for each tables 27 | struct UserPrivilege { 28 | user: User, 29 | table_name: TableName, 30 | column_names: Vec, 31 | privilege: Vec, 32 | } 33 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: 
-------------------------------------------------------------------------------- 1 | stages: 2 | - build 3 | 4 | 5 | rust-latest: 6 | stage: build 7 | image: rust:latest 8 | services: 9 | - postgres:latest 10 | 11 | before_script: 12 | - ./gather-prerequisites.sh 13 | - ./install-postgresql-client.sh 14 | - ./rename-database-host.sh 15 | - ./execute-data-import.sh 16 | script: 17 | - cargo test --features "with-postgres with-sqlite" 18 | - cargo test --features "with-postgres" 19 | - cargo test --features "with-sqlite" 20 | 21 | rust-nightly: 22 | stage: build 23 | image: rustlang/rust:nightly 24 | services: 25 | - postgres:latest 26 | 27 | before_script: 28 | - ./gather-prerequisites.sh 29 | - ./install-postgresql-client.sh 30 | - ./rename-database-host.sh 31 | - ./execute-data-import.sh 32 | 33 | script: 34 | - cargo test --features "with-postgres with-sqlite" 35 | - cargo test --features "with-postgres" 36 | - cargo test --features "with-sqlite" 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018 Jovansonlee Cesar and other contributors 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 
12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /crates/codegen/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![deny(warnings)] 2 | #![deny(clippy::all)] 3 | 4 | extern crate proc_macro; 5 | #[macro_use] 6 | extern crate quote; 7 | extern crate rustorm_dao; 8 | extern crate syn; 9 | 10 | #[macro_use] 11 | mod column_derive; 12 | #[macro_use] 13 | mod dao_derive; 14 | #[macro_use] 15 | mod table_derive; 16 | 17 | use proc_macro::TokenStream; 18 | 19 | #[proc_macro_derive(FromDao)] 20 | pub fn from_dao(input: TokenStream) -> TokenStream { 21 | let s = input.to_string(); 22 | let ast = syn::parse_macro_input(&s).unwrap(); 23 | let gen = dao_derive::impl_from_dao(&ast); 24 | gen.parse().unwrap() 25 | } 26 | 27 | #[proc_macro_derive(ToDao)] 28 | pub fn to_dao(input: TokenStream) -> TokenStream { 29 | let s = input.to_string(); 30 | let ast = syn::parse_macro_input(&s).unwrap(); 31 | let gen = dao_derive::impl_to_dao(&ast); 32 | gen.parse().unwrap() 33 | } 34 | 35 | #[proc_macro_derive(ToTableName)] 36 | pub fn to_table_name(input: TokenStream) -> TokenStream { 37 | let s = input.to_string(); 38 | let ast = syn::parse_macro_input(&s).unwrap(); 39 | let gen = table_derive::impl_to_table_name(&ast); 40 | gen.parse().unwrap() 41 | } 42 | 43 | #[proc_macro_derive(ToColumnNames)] 44 | pub fn to_column_names(input: TokenStream) -> TokenStream { 45 | let s = input.to_string(); 46 | let ast = 
syn::parse_macro_input(&s).unwrap(); 47 | let gen = column_derive::impl_to_column_names(&ast); 48 | gen.parse().unwrap() 49 | } 50 | -------------------------------------------------------------------------------- /crates/codegen/src/column_derive.rs: -------------------------------------------------------------------------------- 1 | use quote; 2 | use syn; 3 | 4 | pub fn impl_to_column_names(ast: &syn::MacroInput) -> quote::Tokens { 5 | let name = &ast.ident; 6 | let generics = &ast.generics; 7 | let fields: Vec<(&syn::Ident, &syn::Ty)> = match ast.body { 8 | syn::Body::Struct(ref data) => { 9 | match *data { 10 | syn::VariantData::Struct(ref fields) => { 11 | fields 12 | .iter() 13 | .map(|f| { 14 | let ident = f.ident.as_ref().unwrap(); 15 | let ty = &f.ty; 16 | (ident, ty) 17 | }) 18 | .collect::>() 19 | } 20 | _ => panic!("Only struct is supported for #[derive(ToColumnNames)]"), 21 | } 22 | } 23 | syn::Body::Enum(_) => panic!("#[derive(ToColumnNames)] can only be used with structs"), 24 | }; 25 | let from_fields: Vec = fields 26 | .iter() 27 | .map(|&(field, _ty)| { 28 | quote! { 29 | rustorm_dao::ColumnName { 30 | name: stringify!(#field).into(), 31 | table: Some(stringify!(#name).to_lowercase().into()), 32 | alias: None, 33 | }, 34 | } 35 | }) 36 | .collect(); 37 | 38 | quote! 
{ 39 | impl #generics rustorm_dao::ToColumnNames for #name #generics { 40 | fn to_column_names() -> Vec { 41 | vec![ 42 | #(#from_fields)* 43 | ] 44 | } 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rustorm" 3 | version = "0.19.0" 4 | authors = [ "Jovansonlee Cesar " ] 5 | license = "MIT" 6 | description = "A complete rewrite of rustorm" 7 | repository = "https://github.com/ivanceras/rustorm" 8 | documentation = "https://docs.rs/rustorm" 9 | readme = "README.md" 10 | keywords = ["orm", "postgresql", "sqlite"] 11 | edition = "2018" 12 | 13 | 14 | [package.metadata.docs.rs] 15 | features = ["with-postgres"] 16 | 17 | [dependencies] 18 | log = "0.4" 19 | postgres = {version = "0.15.1",features = ["with-uuid","with-chrono","with-time","with-geo","with-bit-vec", "with-serde_json"], optional = true} 20 | r2d2_postgres = {version = "0.14.0", optional = true} 21 | postgres-shared = {version = "0.4.0", optional = true} 22 | r2d2 = "0.8" 23 | url = "1.5" 24 | cfg-if = "0.1.2" 25 | uuid = "0.5.1" 26 | chrono = "0.4.0" 27 | rusqlite = {version = "0.21", optional = true} 28 | r2d2_sqlite = {version = "0.14", optional = true} 29 | serde = { version = "1.0.15", features = ["derive"] } 30 | serde_json = "1.0.3" 31 | byteorder = "1.0" 32 | bigdecimal = "0.0.14" 33 | num-bigint = "0.2" 34 | num-traits = "0.2" 35 | num-integer = "0.1" 36 | meval = "0.1.0" 37 | geo = "0.4" 38 | time = "0.1.14" 39 | rustorm_dao = { path = "crates/dao", version = "0.18.0" } 40 | rustorm_codegen = { path = "crates/codegen", version = "0.18.0" } 41 | r2d2_mysql = {version = "16.0.0", optional = true} 42 | thiserror = "1.0.3" 43 | 44 | 45 | 46 | [badges] 47 | travis-ci = { repository = "ivanceras/rustorm" } 48 | 49 | [features] 50 | #default = ["with-postgres"] 51 | with-postgres = ["postgres", "r2d2_postgres", 
"postgres-shared"] 52 | with-sqlite = ["rusqlite","r2d2_sqlite"] 53 | with-mysql = ["r2d2_mysql"] 54 | db-auth = [] # Use the database as authentication server for the user 55 | 56 | -------------------------------------------------------------------------------- /crates/dao/src/column_name.rs: -------------------------------------------------------------------------------- 1 | use crate::common; 2 | use serde_derive::{ 3 | Deserialize, 4 | Serialize, 5 | }; 6 | 7 | #[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] 8 | pub struct ColumnName { 9 | pub name: String, 10 | pub table: Option, 11 | pub alias: Option, 12 | } 13 | 14 | impl ColumnName { 15 | /// create table with name 16 | pub fn from(arg: &str) -> Self { 17 | if arg.contains('.') { 18 | let splinters = arg.split('.').collect::>(); 19 | assert!( 20 | splinters.len() == 2, 21 | "There should only be 2 parts, trying to split `.` {}", 22 | arg 23 | ); 24 | let table = splinters[0].to_owned(); 25 | let name = splinters[1].to_owned(); 26 | ColumnName { 27 | name, 28 | table: Some(table), 29 | alias: None, 30 | } 31 | } else { 32 | ColumnName { 33 | name: arg.to_owned(), 34 | table: None, 35 | alias: None, 36 | } 37 | } 38 | } 39 | 40 | /// return the long name of the table using schema.table_name 41 | pub fn complete_name(&self) -> String { 42 | match self.table { 43 | Some(ref table) => format!("{}.{}", table, self.name), 44 | None => self.name.to_owned(), 45 | } 46 | } 47 | 48 | pub fn safe_complete_name(&self) -> String { 49 | match self.table { 50 | Some(ref table) => format!("{}.{}", common::keywords_safe(table), self.name), 51 | None => self.name.to_owned(), 52 | } 53 | } 54 | } 55 | 56 | pub trait ToColumnNames { 57 | /// extract the columns from struct 58 | fn to_column_names() -> Vec; 59 | } 60 | -------------------------------------------------------------------------------- /src/database.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = 
"db-auth")] 2 | use crate::db_auth::{Role, User}; 3 | use crate::{table::SchemaContent, DbError, Rows, TableDef, TableName, Value}; 4 | use rustorm_codegen::FromDao; 5 | use serde::Serialize; 6 | 7 | /// The current database name and its comment 8 | #[derive(Serialize, FromDao)] 9 | pub struct DatabaseName { 10 | pub(crate) name: String, 11 | pub(crate) description: Option, 12 | } 13 | 14 | pub trait Database { 15 | fn begin_transaction(&mut self) -> Result<(), DbError>; 16 | 17 | fn commit_transaction(&mut self) -> Result<(), DbError>; 18 | 19 | fn rollback_transaction(&mut self) -> Result<(), DbError>; 20 | 21 | fn execute_sql_with_return(&mut self, sql: &str, param: &[&Value]) -> Result; 22 | 23 | fn get_table(&mut self, table_name: &TableName) -> Result, DbError>; 24 | 25 | fn set_autoincrement_value( 26 | &mut self, 27 | table_name: &TableName, 28 | sequence_value: i64, 29 | ) -> Result, DbError>; 30 | 31 | fn get_autoincrement_last_value( 32 | &mut self, 33 | table_name: &TableName, 34 | ) -> Result, DbError>; 35 | 36 | fn get_all_tables(&mut self) -> Result, DbError>; 37 | 38 | fn get_tablenames(&mut self) -> Result, DbError>; 39 | 40 | fn get_grouped_tables(&mut self) -> Result, DbError>; 41 | 42 | fn get_database_name(&mut self) -> Result, DbError>; 43 | 44 | #[cfg(feature = "db-auth")] 45 | fn get_users(&mut self) -> Result, DbError>; 46 | 47 | #[cfg(feature = "db-auth")] 48 | fn get_user_detail(&mut self, username: &str) -> Result, DbError>; 49 | 50 | #[cfg(feature = "db-auth")] 51 | fn get_roles(&mut self, username: &str) -> Result, DbError>; 52 | } 53 | -------------------------------------------------------------------------------- /examples/insert_usage.rs: -------------------------------------------------------------------------------- 1 | use chrono::{offset::Utc, DateTime}; 2 | use rustorm::{DbError, FromDao, Pool, ToColumnNames, ToDao, ToTableName}; 3 | 4 | fn main() { 5 | mod for_insert { 6 | use super::*; 7 | #[derive(Debug, PartialEq, ToDao, 
ToColumnNames, ToTableName)] 8 | pub struct Actor { 9 | pub first_name: String, 10 | pub last_name: String, 11 | } 12 | } 13 | 14 | mod for_retrieve { 15 | use super::*; 16 | #[derive(Debug, FromDao, ToColumnNames, ToTableName)] 17 | pub struct Actor { 18 | pub actor_id: i32, 19 | pub first_name: String, 20 | pub last_name: String, 21 | pub last_update: DateTime, 22 | } 23 | } 24 | 25 | let db_url = "postgres://postgres:p0stgr3s@localhost/sakila"; 26 | let mut pool = Pool::new(); 27 | let mut em = pool.em(db_url).unwrap(); 28 | let tom_cruise = for_insert::Actor { 29 | first_name: "TOM".into(), 30 | last_name: "CRUISE".to_string(), 31 | }; 32 | let tom_hanks = for_insert::Actor { 33 | first_name: "TOM".into(), 34 | last_name: "HANKS".to_string(), 35 | }; 36 | println!("tom_cruise: {:#?}", tom_cruise); 37 | println!("tom_hanks: {:#?}", tom_hanks); 38 | 39 | let actors: Result, DbError> = em.insert(&[&tom_cruise, &tom_hanks]); 40 | println!("Actor: {:#?}", actors); 41 | assert!(actors.is_ok()); 42 | let actors = actors.unwrap(); 43 | let today = Utc::now().date(); 44 | assert_eq!(tom_cruise.first_name, actors[0].first_name); 45 | assert_eq!(tom_cruise.last_name, actors[0].last_name); 46 | assert_eq!(today, actors[0].last_update.date()); 47 | assert_eq!(tom_hanks.first_name, actors[1].first_name); 48 | assert_eq!(tom_hanks.last_name, actors[1].last_name); 49 | assert_eq!(today, actors[1].last_update.date()); 50 | } 51 | -------------------------------------------------------------------------------- /Changelog.md: -------------------------------------------------------------------------------- 1 | # Unreleased 2 | 3 | # 0.19.0 4 | - Rename `Table` to `TableDef` and `Column` to `ColumnDef`, this is a more appropriate name since it is a TableDefinition and ColumnDefinition respectively 5 | - Convert `PlatformError` to `DataOpError` to avoid exposing the platform specific error 6 | 7 | # 0.18.0 8 | - Remove smarty algorithmn to cast blob image to data_uri, return as blob 
9 | - Add a function to check if a column is a primary to the table 10 | - Implement setting and getting the autoincrement primary key of table for postgresql 11 | - The ColumnConstraint AutoIncrement now contains the name of its corresponding sequence 12 | - Add conversion of arrays to json 13 | - simplify the default value in enum columns 14 | - Implement displaying of text array 15 | - Expose get_tablenames in EntityManager 16 | - reexport uuid 17 | - revise the SQL statement for getting the column default as it was dropped in postgresql 12 18 | 19 | # 0.17.0 20 | - Update rusqlite to 0.21 21 | - Update r2d2_sqlite to 0.14 22 | - Fix database pool being created every time a connection is requested. 23 | 24 | # 0.16.0 25 | - Unify the interface for DatabaseMut + Database, EntityMut +Entity into their original name, 26 | - **Breaking change**: The query now requires the EntityManager to be passed as mutable. 27 | 28 | # 0.15.4 29 | - use thiserror for implementing Error in rustorm_dao 30 | - rename sq module to a more appropriate sqlite since it does not conflict with the used crate name of sqlite which is rustqlite 31 | # 0.15.3 32 | - implement FromValue for converting types that are not in the users' crate 33 | - remove panics on conversions 34 | - add supported parameter types 35 | - `Option<&'a str>` 36 | - `&Option` 37 | - implement conversion of numeric to bool 38 | - add support ToDao, ToTableName, ToColumnNames to borrowed field contained struct 39 | 40 | # 0.15.0 41 | - Mysql support 42 | - dao and codegen is not used as local path 43 | 44 | 45 | # 0.14.0 46 | - Remove dependency to openssl 47 | -------------------------------------------------------------------------------- /examples/insert_usage_mysql.rs: -------------------------------------------------------------------------------- 1 | use chrono::{offset::Utc, DateTime}; 2 | use rustorm::{pool, DbError, FromDao, Pool, ToColumnNames, ToDao, ToTableName}; 3 | 4 | /// Run using: 5 | /// ```sh 6 | /// 
cargo run --example insert_usage_mysql --features "with-mysql" 7 | /// ``` 8 | fn main() { 9 | mod for_insert { 10 | use super::*; 11 | #[derive(Debug, PartialEq, ToDao, ToColumnNames, ToTableName)] 12 | pub struct Actor { 13 | pub first_name: String, 14 | pub last_name: String, 15 | } 16 | } 17 | 18 | mod for_retrieve { 19 | use super::*; 20 | #[derive(Debug, FromDao, ToColumnNames, ToTableName)] 21 | pub struct Actor { 22 | pub actor_id: i32, 23 | pub first_name: String, 24 | pub last_name: String, 25 | pub last_update: DateTime, 26 | } 27 | } 28 | 29 | let db_url = "mysql://root:r00tpwdh3r3@localhost/sakila"; 30 | let mut pool = Pool::new(); 31 | pool.ensure(db_url); 32 | let mut em = pool.em(db_url).expect("Can not connect"); 33 | let tom_cruise = for_insert::Actor { 34 | first_name: "TOM".into(), 35 | last_name: "CRUISE".to_string(), 36 | }; 37 | let tom_hanks = for_insert::Actor { 38 | first_name: "TOM".into(), 39 | last_name: "HANKS".to_string(), 40 | }; 41 | println!("tom_cruise: {:#?}", tom_cruise); 42 | println!("tom_hanks: {:#?}", tom_hanks); 43 | 44 | let actors: Result, DbError> = em.insert(&[&tom_cruise, &tom_hanks]); 45 | println!("Actor: {:#?}", actors); 46 | assert!(actors.is_ok()); 47 | let actors = actors.unwrap(); 48 | let today = Utc::now().date(); 49 | assert_eq!(tom_cruise.first_name, actors[0].first_name); 50 | assert_eq!(tom_cruise.last_name, actors[0].last_name); 51 | assert_eq!(today, actors[0].last_update.date()); 52 | assert_eq!(tom_hanks.first_name, actors[1].first_name); 53 | assert_eq!(tom_hanks.last_name, actors[1].last_name); 54 | assert_eq!(today, actors[1].last_update.date()); 55 | } 56 | -------------------------------------------------------------------------------- /crates/dao/src/table_name.rs: -------------------------------------------------------------------------------- 1 | use crate::common; 2 | use serde_derive::{ 3 | Deserialize, 4 | Serialize, 5 | }; 6 | use std::hash::{ 7 | Hash, 8 | Hasher, 9 | }; 10 | 11 | 
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 12 | pub struct TableName { 13 | pub name: String, 14 | pub schema: Option, 15 | pub alias: Option, 16 | } 17 | 18 | impl Eq for TableName {} 19 | 20 | impl TableName { 21 | /// create table with name 22 | pub fn from(arg: &str) -> Self { 23 | if arg.contains('.') { 24 | let splinters = arg.split('.').collect::>(); 25 | assert!(splinters.len() == 2, "There should only be 2 parts"); 26 | let schema = splinters[0].to_owned(); 27 | let table = splinters[1].to_owned(); 28 | TableName { 29 | schema: Some(schema), 30 | name: table, 31 | alias: None, 32 | } 33 | } else { 34 | TableName { 35 | schema: None, 36 | name: arg.to_owned(), 37 | alias: None, 38 | } 39 | } 40 | } 41 | 42 | pub fn name(&self) -> String { self.name.to_owned() } 43 | 44 | pub fn safe_name(&self) -> String { common::keywords_safe(&self.name) } 45 | 46 | /// return the long name of the table using schema.table_name 47 | pub fn complete_name(&self) -> String { 48 | match self.schema { 49 | Some(ref schema) => format!("{}.{}", schema, self.name), 50 | None => self.name.to_owned(), 51 | } 52 | } 53 | 54 | pub fn safe_complete_name(&self) -> String { 55 | match self.schema { 56 | Some(ref schema) => format!("{}.{}", schema, self.safe_name()), 57 | None => self.name.to_owned(), 58 | } 59 | } 60 | } 61 | 62 | impl Hash for TableName { 63 | fn hash(&self, state: &mut H) { 64 | self.schema.hash(state); 65 | self.name.hash(state); 66 | } 67 | } 68 | 69 | pub trait ToTableName { 70 | /// extract the table name from a struct 71 | fn to_table_name() -> TableName; 72 | } 73 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![deny(warnings)] 2 | #![deny(clippy::all)] 3 | //! 4 | //! ## Rustorm 5 | //! 6 | //! [![Latest Version](https://img.shields.io/crates/v/rustorm.svg)](https://crates.io/crates/rustorm) 7 | //! 
[![Build Status](https://travis-ci.org/ivanceras/rustorm.svg?branch=master)](https://travis-ci.org/ivanceras/rustorm) 8 | //! [![MIT licensed](https://img.shields.io/badge/license-MIT-blue.svg)](./LICENSE) 9 | //! 10 | //! Rustorm is an SQL-centered ORM with focus on ease of use on conversion of database types to 11 | //! their appropriate rust type. 12 | //! 13 | //! Selecting records 14 | //! 15 | 16 | use cfg_if::cfg_if; 17 | 18 | cfg_if! {if #[cfg(feature = "with-postgres")]{ 19 | extern crate r2d2_postgres; 20 | extern crate postgres; 21 | #[macro_use] 22 | extern crate postgres_shared; 23 | mod pg; 24 | }} 25 | cfg_if! {if #[cfg(feature = "with-sqlite")]{ 26 | extern crate r2d2_sqlite; 27 | extern crate rusqlite; 28 | mod sqlite; 29 | }} 30 | cfg_if! {if #[cfg(feature = "with-mysql")]{ 31 | mod my; 32 | }} 33 | 34 | pub mod column; 35 | pub mod common; 36 | mod dao_manager; 37 | mod database; 38 | #[cfg(feature = "db-auth")] 39 | mod db_auth; 40 | mod entity; 41 | pub mod error; 42 | mod platform; 43 | pub mod pool; 44 | pub mod table; 45 | pub mod types; 46 | 47 | pub mod util; 48 | 49 | pub use chrono; 50 | pub use column::ColumnDef; 51 | pub use dao_manager::DaoManager; 52 | pub use database::{Database, DatabaseName}; 53 | pub use entity::EntityManager; 54 | pub use error::{DataError, DbError}; 55 | pub use platform::DBPlatform; 56 | pub use pool::Pool; 57 | pub use table::TableDef; 58 | pub use uuid::{self, Uuid}; 59 | 60 | // we export the traits that has a derived proc macro 61 | // this are used in the apps 62 | pub use codegen::{FromDao, ToColumnNames, ToDao, ToTableName}; 63 | 64 | pub use rustorm_dao::{ 65 | self, Array, ColumnName, ConvertError, Dao, FromValue, Rows, TableName, ToValue, Value, 66 | }; 67 | 68 | /// Wrap the rustorm_dao exports to avoid name conflict with the rustorm_codegen 69 | pub mod dao { 70 | pub use rustorm_dao::{FromDao, ToColumnNames, ToDao, ToTableName}; 71 | } 72 | 73 | /// Wrap the rustorm_codegen exports to avoid name 
conflict with the rustorm_dao 74 | pub mod codegen { 75 | pub use rustorm_codegen::{FromDao, ToColumnNames, ToDao, ToTableName}; 76 | } 77 | 78 | #[macro_use] 79 | extern crate log; 80 | -------------------------------------------------------------------------------- /src/dao_manager.rs: -------------------------------------------------------------------------------- 1 | use crate::{DBPlatform, Dao, DataError, DbError, Rows, Value}; 2 | 3 | /// an interface executing sql statement and getting the results as generic DAO values 4 | /// without any further conversion. 5 | pub struct DaoManager(pub DBPlatform); 6 | 7 | impl DaoManager { 8 | pub fn begin_transaction(&mut self) -> Result<(), DbError> { 9 | self.0.begin_transaction() 10 | } 11 | 12 | pub fn commit_transaction(&mut self) -> Result<(), DbError> { 13 | self.0.commit_transaction() 14 | } 15 | 16 | pub fn rollback_transaction(&mut self) -> Result<(), DbError> { 17 | self.0.rollback_transaction() 18 | } 19 | 20 | pub fn execute_sql_with_return( 21 | &mut self, 22 | sql: &str, 23 | params: &[&Value], 24 | ) -> Result { 25 | let rows = self.0.execute_sql_with_return(sql, params)?; 26 | Ok(rows) 27 | } 28 | 29 | pub fn execute_sql_with_records_return( 30 | &mut self, 31 | sql: &str, 32 | params: &[&Value], 33 | ) -> Result, DbError> { 34 | let rows = self.0.execute_sql_with_return(sql, params)?; 35 | let daos: Vec = rows.iter().collect(); 36 | Ok(daos) 37 | } 38 | 39 | pub fn execute_sql_with_one_return( 40 | &mut self, 41 | sql: &str, 42 | params: &[&Value], 43 | ) -> Result { 44 | let record: Result, DbError> = 45 | self.execute_sql_with_maybe_one_return(sql, params); 46 | match record { 47 | Ok(record) => match record { 48 | Some(record) => Ok(record), 49 | None => Err(DbError::DataError(DataError::ZeroRecordReturned)), 50 | }, 51 | Err(e) => Err(e), 52 | } 53 | } 54 | 55 | pub fn execute_sql_with_maybe_one_return( 56 | &mut self, 57 | sql: &str, 58 | params: &[&Value], 59 | ) -> Result, DbError> { 60 | let 
result: Result, DbError> = self.execute_sql_with_records_return(sql, params); 61 | match result { 62 | Ok(mut result) => match result.len() { 63 | 0 => Ok(None), 64 | 1 => Ok(Some(result.remove(0))), 65 | _ => Err(DbError::DataError(DataError::MoreThan1RecordReturned)), 66 | }, 67 | Err(e) => Err(e), 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /src/pg/interval.rs: -------------------------------------------------------------------------------- 1 | /// Copied from diesel 2 | /// 3 | /// Intervals in Postgres are separated into 3 parts. A 64 bit integer representing time in 4 | /// microseconds, a 32 bit integer representing number of days, and a 32 bit integer 5 | /// representing number of months. This struct is a dumb wrapper type, meant only to indicate the 6 | /// meaning of these parts. 7 | /// 8 | use byteorder::{BigEndian, ReadBytesExt}; 9 | use postgres::types::{self, FromSql, Type}; 10 | use std::error::Error; 11 | 12 | #[derive(Debug, Clone, PartialEq, Eq)] 13 | pub struct PgInterval { 14 | /// The number of whole microseconds 15 | pub microseconds: i64, 16 | /// The number of whole days 17 | pub days: i32, 18 | /// The number of whole months 19 | pub months: i32, 20 | } 21 | 22 | impl PgInterval { 23 | /// Constructs a new `PgInterval` 24 | /// 25 | /// No conversion occurs on the arguments. It is valid to provide a number 26 | /// of microseconds greater than the longest possible day, or a number of 27 | /// days greater than the longest possible month, as it is impossible to say 28 | /// how many months are in "40 days" without knowing a precise date. 
29 | pub fn new(microseconds: i64, days: i32, months: i32) -> Self { 30 | PgInterval { 31 | microseconds, 32 | days, 33 | months, 34 | } 35 | } 36 | 37 | /// Equivalent to `new(microseconds, 0, 0)` 38 | pub fn from_microseconds(microseconds: i64) -> Self { 39 | Self::new(microseconds, 0, 0) 40 | } 41 | 42 | /// Equivalent to `new(0, days, 0)` 43 | pub fn from_days(days: i32) -> Self { 44 | Self::new(0, days, 0) 45 | } 46 | 47 | /// Equivalent to `new(0, 0, months)` 48 | pub fn from_months(months: i32) -> Self { 49 | Self::new(0, 0, months) 50 | } 51 | 52 | /* 53 | /// rough microseconds 54 | /// 1 day = 86_400_000_000 ms 55 | /// 1 month = 2_629_800_000_000 ms 56 | /// by duckduckgo 57 | pub fn microseconds(&self) -> i64 { 58 | self.months * 2_629_800_000_000i64 + self.days * 86_400_000_000i64 + self.microseconds 59 | } 60 | */ 61 | } 62 | 63 | impl FromSql for PgInterval { 64 | fn from_sql(_ty: &Type, bytes: &[u8]) -> Result> { 65 | let mut bytes = <&[u8]>::clone(&bytes); 66 | let ms = bytes.read_i64::()?; 67 | let days = bytes.read_i32::()?; 68 | let months = bytes.read_i32::()?; 69 | Ok(PgInterval::new(ms, days, months)) 70 | } 71 | 72 | fn accepts(ty: &Type) -> bool { 73 | match *ty { 74 | types::INTERVAL => true, 75 | _ => false, 76 | } 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /crates/codegen/src/dao_derive.rs: -------------------------------------------------------------------------------- 1 | use quote; 2 | use syn; 3 | 4 | pub fn impl_from_dao(ast: &syn::MacroInput) -> quote::Tokens { 5 | let name = &ast.ident; 6 | let fields: Vec<(&syn::Ident, &syn::Ty)> = match ast.body { 7 | syn::Body::Struct(ref data) => { 8 | match *data { 9 | syn::VariantData::Struct(ref fields) => { 10 | fields 11 | .iter() 12 | .map(|f| { 13 | let ident = f.ident.as_ref().unwrap(); 14 | let ty = &f.ty; 15 | (ident, ty) 16 | }) 17 | .collect::>() 18 | } 19 | _ => panic!("Only struct is supported for #[derive(FromDao)]"), 20 | 
} 21 | } 22 | syn::Body::Enum(_) => panic!("#[derive(FromDao)] can only be used with structs"), 23 | }; 24 | let from_fields: Vec = fields 25 | .iter() 26 | .map(|&(field, _ty)| { 27 | quote! { #field: dao.get(stringify!(#field)).unwrap(),} 28 | }) 29 | .collect(); 30 | 31 | quote! { 32 | impl rustorm_dao::FromDao for #name { 33 | 34 | fn from_dao(dao: &rustorm_dao::Dao) -> Self { 35 | #name { 36 | #(#from_fields)* 37 | } 38 | 39 | } 40 | } 41 | } 42 | } 43 | 44 | pub fn impl_to_dao(ast: &syn::MacroInput) -> quote::Tokens { 45 | let name = &ast.ident; 46 | let generics = &ast.generics; 47 | let fields: Vec<(&syn::Ident, &syn::Ty)> = match ast.body { 48 | syn::Body::Struct(ref data) => { 49 | match *data { 50 | syn::VariantData::Struct(ref fields) => { 51 | fields 52 | .iter() 53 | .map(|f| { 54 | let ident = f.ident.as_ref().unwrap(); 55 | let ty = &f.ty; 56 | (ident, ty) 57 | }) 58 | .collect::>() 59 | } 60 | _ => panic!("Only struct is supported for #[derive(ToDao)]"), 61 | } 62 | } 63 | syn::Body::Enum(_) => panic!("#[derive(ToDao)] can only be used with structs"), 64 | }; 65 | let from_fields: &Vec = &fields 66 | .iter() 67 | .map(|&(field, _ty)| { 68 | quote! { dao.insert(stringify!(#field), &self.#field);} 69 | }) 70 | .collect(); 71 | 72 | quote! { 73 | impl #generics rustorm_dao::ToDao for #name #generics { 74 | fn to_dao(&self) -> rustorm_dao::Dao { 75 | let mut dao = rustorm_dao::Dao::new(); 76 | #(#from_fields)* 77 | dao 78 | } 79 | } 80 | 81 | } 82 | } 83 | 84 | #[cfg(test)] 85 | mod tests {} 86 | -------------------------------------------------------------------------------- /src/platform.rs: -------------------------------------------------------------------------------- 1 | use crate::{error::ParseError, Database}; 2 | use cfg_if::cfg_if; 3 | use std::{convert::TryFrom, ops::Deref}; 4 | use url::Url; 5 | 6 | cfg_if! {if #[cfg(feature = "with-postgres")]{ 7 | use crate::pg::PostgresDB; 8 | }} 9 | 10 | cfg_if! 
{if #[cfg(feature = "with-sqlite")]{ 11 | use crate::sqlite::SqliteDB; 12 | }} 13 | 14 | cfg_if! {if #[cfg(feature = "with-mysql")]{ 15 | use crate::my::MysqlDB; 16 | }} 17 | 18 | pub enum DBPlatform { 19 | #[cfg(feature = "with-postgres")] 20 | Postgres(Box), 21 | #[cfg(feature = "with-sqlite")] 22 | Sqlite(Box), 23 | #[cfg(feature = "with-mysql")] 24 | Mysql(Box), 25 | } 26 | 27 | impl Deref for DBPlatform { 28 | type Target = dyn Database; 29 | 30 | fn deref(&self) -> &Self::Target { 31 | match *self { 32 | #[cfg(feature = "with-postgres")] 33 | DBPlatform::Postgres(ref pg) => pg.deref(), 34 | #[cfg(feature = "with-sqlite")] 35 | DBPlatform::Sqlite(ref sq) => sq.deref(), 36 | #[cfg(feature = "with-mysql")] 37 | DBPlatform::Mysql(ref my) => my.deref(), 38 | } 39 | } 40 | } 41 | 42 | impl std::ops::DerefMut for DBPlatform { 43 | fn deref_mut(&mut self) -> &mut Self::Target { 44 | match *self { 45 | #[cfg(feature = "with-postgres")] 46 | DBPlatform::Postgres(ref mut pg) => pg.deref_mut(), 47 | #[cfg(feature = "with-sqlite")] 48 | DBPlatform::Sqlite(ref mut sq) => sq.deref_mut(), 49 | #[cfg(feature = "with-mysql")] 50 | DBPlatform::Mysql(ref mut my) => my.deref_mut(), 51 | } 52 | } 53 | } 54 | 55 | pub(crate) enum Platform { 56 | #[cfg(feature = "with-postgres")] 57 | Postgres, 58 | #[cfg(feature = "with-sqlite")] 59 | Sqlite(String), 60 | #[cfg(feature = "with-mysql")] 61 | Mysql, 62 | Unsupported(String), 63 | } 64 | 65 | impl<'a> TryFrom<&'a str> for Platform { 66 | type Error = ParseError; 67 | 68 | fn try_from(s: &'a str) -> Result { 69 | let url = Url::parse(s); 70 | match url { 71 | Ok(url) => { 72 | let scheme = url.scheme(); 73 | match scheme { 74 | #[cfg(feature = "with-postgres")] 75 | "postgres" => Ok(Platform::Postgres), 76 | #[cfg(feature = "with-sqlite")] 77 | "sqlite" => { 78 | let host = url.host_str().unwrap(); 79 | let path = url.path(); 80 | let path = if path == "/" { "" } else { path }; 81 | let db_file = format!("{}{}", host, path); 82 | 
Ok(Platform::Sqlite(db_file)) 83 | } 84 | #[cfg(feature = "with-mysql")] 85 | "mysql" => Ok(Platform::Mysql), 86 | _ => Ok(Platform::Unsupported(scheme.to_string())), 87 | } 88 | } 89 | Err(e) => Err(ParseError::DbUrlParseError(e)), 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /examples/insert_to_sqlite.rs: -------------------------------------------------------------------------------- 1 | use chrono::{NaiveDateTime, NaiveDate, NaiveTime}; 2 | use rustorm::{DbError, FromDao, Pool, ToColumnNames, ToDao, ToTableName, Value}; 3 | 4 | fn main() { 5 | mod for_insert { 6 | use super::*; 7 | #[derive(Debug, PartialEq, ToDao, ToColumnNames, ToTableName)] 8 | pub struct Actor { 9 | pub first_name: String, 10 | pub last_name: String, 11 | pub somedate: NaiveDateTime, 12 | } 13 | } 14 | 15 | mod for_retrieve { 16 | use super::*; 17 | #[derive(Debug, FromDao, ToColumnNames, ToTableName)] 18 | pub struct Actor { 19 | pub actor_id: i64, 20 | pub first_name: String, 21 | pub last_name: String, 22 | pub somedate: NaiveDateTime, 23 | pub last_update: NaiveDateTime, 24 | } 25 | } 26 | let create_sql = "CREATE TABLE actor( 27 | actor_id integer PRIMARY KEY AUTOINCREMENT, 28 | first_name text, 29 | last_name text, 30 | somedate text, 31 | last_update timestamp DEFAULT current_timestamp 32 | )"; 33 | 34 | let db_url = "sqlite:///tmp/sqlite.db"; 35 | let mut pool = Pool::new(); 36 | let mut em = pool.em(db_url).unwrap(); 37 | let ret = em.db().execute_sql_with_return(create_sql, &[]); 38 | println!("ret: {:?}", ret); 39 | assert!(ret.is_ok()); 40 | 41 | let d = NaiveDate::from_ymd(2000, 10, 9); 42 | let t = NaiveTime::from_hms_milli(15, 2, 55, 2); 43 | 44 | let tom_cruise = for_insert::Actor { 45 | first_name: "TOM".into(), 46 | last_name: "CRUISE".to_string(), 47 | somedate: NaiveDateTime::new(d,t), 48 | }; 49 | 50 | let d = NaiveDate::from_ymd(2000, 10, 9); 51 | let t = NaiveTime::from_hms_milli(15, 2, 55, 22); 52 | let 
tom_hanks = for_insert::Actor { 53 | first_name: "TOM".into(), 54 | last_name: "HANKS".to_string(), 55 | somedate: NaiveDateTime::new(d,t), 56 | }; 57 | 58 | let d = NaiveDate::from_ymd(2000, 10, 9); 59 | let t = NaiveTime::from_hms_milli(15, 2, 55, 222); 60 | let tom_selleck = for_insert::Actor { 61 | first_name: "TOM".into(), 62 | last_name: "SELLECK".to_string(), 63 | somedate: NaiveDateTime::new(d,t), 64 | }; 65 | println!("tom_cruise: {:#?}", tom_cruise); 66 | println!("tom_hanks: {:#?}", tom_hanks); 67 | println!("tom_selleck: {:#?}", tom_selleck); 68 | 69 | let actors = vec![tom_cruise, tom_hanks, tom_selleck]; 70 | 71 | for actor in actors { 72 | let first_name: Value = actor.first_name.into(); 73 | let last_name: Value = actor.last_name.into(); 74 | let somedate: Value = actor.somedate.into(); 75 | let ret = em.db().execute_sql_with_return( 76 | "INSERT INTO actor(first_name, last_name, somedate) 77 | VALUES ($1, $2, $3)", 78 | &[&first_name, &last_name, &somedate], 79 | ); 80 | assert!(ret.is_ok()); 81 | } 82 | 83 | let actors: Result, DbError> = 84 | em.execute_sql_with_return("SELECT * from actor", &[]); 85 | println!("Actor: {:#?}", actors); 86 | assert!(actors.is_ok()); 87 | } 88 | -------------------------------------------------------------------------------- /crates/dao/src/rows.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | Dao, 3 | Value, 4 | }; 5 | use serde_derive::{ 6 | Deserialize, 7 | Serialize, 8 | }; 9 | use std::slice; 10 | 11 | /// use this to store data retrieved from the database 12 | /// This is also slimmer than Vec when serialized 13 | #[derive(Debug, PartialEq, Serialize, Deserialize, Clone)] 14 | pub struct Rows { 15 | pub columns: Vec, 16 | pub data: Vec>, 17 | /// can be optionally set, indicates how many total rows are there in the table 18 | pub count: Option, 19 | } 20 | 21 | impl Rows { 22 | pub fn empty() -> Self { Rows::new(vec![]) } 23 | 24 | pub fn new(columns: 
Vec) -> Self { 25 | Rows { 26 | columns, 27 | data: vec![], 28 | count: None, 29 | } 30 | } 31 | 32 | pub fn push(&mut self, row: Vec) { self.data.push(row) } 33 | 34 | /// Returns an iterator over the `Row`s. 35 | pub fn iter(&self) -> Iter { 36 | Iter { 37 | columns: self.columns.clone(), 38 | iter: self.data.iter(), 39 | } 40 | } 41 | } 42 | 43 | /// An iterator over `Row`s. 44 | pub struct Iter<'a> { 45 | columns: Vec, 46 | iter: slice::Iter<'a, Vec>, 47 | } 48 | 49 | impl<'a> Iterator for Iter<'a> { 50 | type Item = Dao; 51 | 52 | fn next(&mut self) -> Option { 53 | let next_row = self.iter.next(); 54 | if let Some(row) = next_row { 55 | if !row.is_empty() { 56 | let mut dao = Dao::new(); 57 | for (i, column) in self.columns.iter().enumerate() { 58 | if let Some(value) = row.get(i) { 59 | dao.insert_value(column, value); 60 | } 61 | } 62 | Some(dao) 63 | } else { 64 | None 65 | } 66 | } else { 67 | None 68 | } 69 | } 70 | 71 | fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } 72 | } 73 | 74 | impl<'a> ExactSizeIterator for Iter<'a> {} 75 | 76 | #[cfg(test)] 77 | mod test { 78 | use super::*; 79 | 80 | #[test] 81 | fn iteration_count() { 82 | let columns = vec!["id".to_string(), "username".to_string()]; 83 | let data: Vec> = vec![vec![1.into(), "ivanceras".into()]]; 84 | let rows = Rows { 85 | columns, 86 | data, 87 | count: None, 88 | }; 89 | assert_eq!(1, rows.iter().count()); 90 | } 91 | 92 | #[test] 93 | fn iteration_count2() { 94 | let columns = vec!["id".to_string(), "username".to_string()]; 95 | let data: Vec> = vec![vec![1.into(), "ivanceras".into()], vec![ 96 | 2.into(), 97 | "lee".into(), 98 | ]]; 99 | let rows = Rows { 100 | columns, 101 | data, 102 | count: None, 103 | }; 104 | assert_eq!(2, rows.iter().count()); 105 | } 106 | 107 | #[test] 108 | fn dao() { 109 | let columns = vec!["id".to_string(), "username".to_string()]; 110 | let data: Vec> = vec![vec![1.into(), "ivanceras".into()]]; 111 | let rows = Rows { 112 | columns, 113 | 
data, 114 | count: None, 115 | }; 116 | let mut dao = Dao::new(); 117 | dao.insert("id", 1); 118 | dao.insert("username", "ivanceras"); 119 | assert_eq!(dao, rows.iter().next().unwrap()); 120 | } 121 | 122 | #[test] 123 | fn dao2() { 124 | let columns = vec!["id".to_string(), "username".to_string()]; 125 | let data: Vec> = vec![vec![1.into(), "ivanceras".into()], vec![ 126 | 2.into(), 127 | "lee".into(), 128 | ]]; 129 | let rows = Rows { 130 | columns, 131 | data, 132 | count: None, 133 | }; 134 | let mut iter = rows.iter(); 135 | let mut dao = Dao::new(); 136 | dao.insert("id", 1); 137 | dao.insert("username", "ivanceras"); 138 | assert_eq!(dao, iter.next().unwrap()); 139 | 140 | let mut dao2 = Dao::new(); 141 | dao2.insert("id", 2); 142 | dao2.insert("username", "lee"); 143 | assert_eq!(dao2, iter.next().unwrap()); 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /crates/dao/src/dao.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | error::DaoError, 3 | FromValue, 4 | ToValue, 5 | Value, 6 | }; 7 | use serde::{ 8 | ser::{ 9 | Serialize, 10 | Serializer, 11 | }, 12 | Deserialize, 13 | Deserializer, 14 | }; 15 | use std::collections::BTreeMap; 16 | 17 | #[derive(Debug, PartialEq, Clone, Default)] 18 | pub struct Dao(pub BTreeMap); 19 | 20 | impl Dao { 21 | pub fn new() -> Self { Dao::default() } 22 | 23 | pub fn insert(&mut self, k: K, v: V) 24 | where 25 | K: ToString, 26 | V: ToValue, 27 | { 28 | self.0.insert(k.to_string(), v.to_value()); 29 | } 30 | 31 | pub fn insert_value(&mut self, k: K, value: &Value) 32 | where 33 | K: ToString, 34 | { 35 | self.0.insert(k.to_string(), value.clone()); 36 | } 37 | 38 | pub fn get<'a, T>(&'a self, s: &str) -> Result 39 | where 40 | T: FromValue, 41 | { 42 | let value: Option<&'a Value> = self.0.get(s); 43 | match value { 44 | Some(v) => FromValue::from_value(v).map_err(DaoError::ConvertError), 45 | None => 
Err(DaoError::NoSuchValueError(s.into())), 46 | } 47 | } 48 | 49 | pub fn get_opt<'a, T>(&'a self, s: &str) -> Result, DaoError> 50 | where 51 | T: FromValue, 52 | { 53 | let value: Option<&'a Value> = self.0.get(s); 54 | match value { 55 | Some(v) => { 56 | match v { 57 | Value::Nil => Ok(None), 58 | _ => { 59 | Ok(Some( 60 | FromValue::from_value(v).map_err(DaoError::ConvertError)?, 61 | )) 62 | } 63 | } 64 | } 65 | None => Ok(None), 66 | } 67 | } 68 | 69 | pub fn get_value(&self, s: &str) -> Option<&Value> { self.0.get(s) } 70 | 71 | pub fn remove(&mut self, s: &str) -> Option { self.0.remove(s) } 72 | } 73 | 74 | impl<'a> Serialize for Dao { 75 | fn serialize(&self, serializer: S) -> Result 76 | where 77 | S: Serializer, 78 | { 79 | self.0.serialize(serializer) 80 | } 81 | } 82 | 83 | impl<'de> Deserialize<'de> for Dao { 84 | fn deserialize(deserializer: D) -> Result 85 | where 86 | D: Deserializer<'de>, 87 | { 88 | BTreeMap::deserialize(deserializer).map(Dao) 89 | } 90 | } 91 | 92 | pub trait FromDao { 93 | /// convert dao to an instance of the corresponding struct of the model 94 | /// taking into considerating the renamed columns 95 | fn from_dao(dao: &Dao) -> Self; 96 | } 97 | 98 | pub trait ToDao { 99 | /// convert from an instance of the struct to a dao representation 100 | /// to be saved into the database 101 | fn to_dao(&self) -> Dao; 102 | } 103 | 104 | #[cfg(test)] 105 | mod tests { 106 | use super::*; 107 | use serde_json; 108 | use uuid::Uuid; 109 | 110 | #[test] 111 | fn insert_double() { 112 | let mut dao = Dao::new(); 113 | dao.insert("life", 42.0f64); 114 | let life: Result = dao.get("life"); 115 | assert_eq!(life.unwrap(), 42.0f64); 116 | } 117 | 118 | #[test] 119 | fn insert_float() { 120 | let mut dao = Dao::new(); 121 | dao.insert("life", 42.0f32); 122 | let life: Result = dao.get("life"); 123 | assert_eq!(life.unwrap(), 42.0f64); 124 | } 125 | 126 | #[test] 127 | fn uuid() { 128 | let mut dao = Dao::new(); 129 | let uuid = Uuid::new_v4(); 
130 | dao.insert("user_id", uuid); 131 | } 132 | 133 | #[test] 134 | fn serialize_json() { 135 | let mut dao = Dao::new(); 136 | dao.insert("life", 42); 137 | dao.insert("lemons", "lemonade"); 138 | let json = serde_json::to_string(&dao).unwrap(); 139 | let expected = r#"{"lemons":{"Text":"lemonade"},"life":{"Int":42}}"#; 140 | assert_eq!(json, expected); 141 | } 142 | 143 | #[test] 144 | fn test_get_opt() { 145 | let mut dao = Dao::new(); 146 | dao.insert("life", 42); 147 | let life: Result, _> = dao.get("life"); 148 | assert!(life.is_ok()); 149 | let life = life.unwrap(); 150 | assert!(life.is_some()); 151 | assert_eq!(life.unwrap(), 42); 152 | } 153 | 154 | #[test] 155 | fn referenced() { 156 | let mut dao = Dao::new(); 157 | let v = 42; 158 | let s = "lemonade"; 159 | dao.insert("life", &v); 160 | dao.insert("lemons", s); 161 | let life: Result, _> = dao.get("life"); 162 | assert!(life.is_ok()); 163 | let life = life.unwrap(); 164 | assert!(life.is_some()); 165 | assert_eq!(life.unwrap(), 42); 166 | } 167 | } 168 | -------------------------------------------------------------------------------- /src/types.rs: -------------------------------------------------------------------------------- 1 | use rustorm_dao::{value::Array, Value}; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | #[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] 5 | pub enum SqlType { 6 | Bool, 7 | Tinyint, 8 | Smallint, 9 | Int, 10 | Bigint, 11 | 12 | Real, 13 | Float, 14 | Double, 15 | Numeric, 16 | 17 | Tinyblob, 18 | Mediumblob, 19 | Blob, 20 | Longblob, 21 | Varbinary, 22 | 23 | Char, 24 | Varchar, 25 | Tinytext, 26 | Mediumtext, 27 | Text, 28 | Json, 29 | TsVector, 30 | 31 | Uuid, 32 | Date, 33 | Timestamp, 34 | TimestampTz, 35 | 36 | Time, 37 | TimeTz, 38 | Interval, 39 | 40 | IpAddress, 41 | 42 | Point, 43 | 44 | // enum list with the choices value 45 | Enum(String, Vec), 46 | Array(Box), 47 | } 48 | 49 | impl SqlType { 50 | pub fn is_array_type(&self) -> bool { 51 | match 
*self { 52 | SqlType::Array(_) => true, 53 | _ => false, 54 | } 55 | } 56 | 57 | pub fn is_integer_type(&self) -> bool { 58 | match *self { 59 | SqlType::Int => true, 60 | SqlType::Tinyint => true, 61 | SqlType::Smallint => true, 62 | SqlType::Bigint => true, 63 | _ => false, 64 | } 65 | } 66 | 67 | pub fn is_decimal_type(&self) -> bool { 68 | match *self { 69 | SqlType::Real => true, 70 | SqlType::Float => true, 71 | SqlType::Double => true, 72 | SqlType::Numeric => true, 73 | _ => false, 74 | } 75 | } 76 | 77 | pub fn cast_as(&self) -> Option { 78 | match *self { 79 | SqlType::TsVector => Some(SqlType::Text), 80 | _ => None, 81 | } 82 | } 83 | 84 | pub fn name(&self) -> String { 85 | match *self { 86 | SqlType::Text => "text".into(), 87 | SqlType::TsVector => "tsvector".into(), 88 | SqlType::Array(ref ty) => match ty.as_ref() { 89 | SqlType::Text => "text[]".into(), 90 | _ => panic!("not yet dealt {:?}", self), 91 | }, 92 | _ => panic!("not yet dealt {:?}", self), 93 | } 94 | } 95 | } 96 | 97 | #[derive(Debug, Serialize, PartialEq, Clone)] 98 | pub enum ArrayType { 99 | Bool, 100 | Tinyint, 101 | Smallint, 102 | Int, 103 | Bigint, 104 | 105 | Real, 106 | Float, 107 | Double, 108 | Numeric, 109 | 110 | Char, 111 | Varchar, 112 | Tinytext, 113 | Mediumtext, 114 | Text, 115 | 116 | Uuid, 117 | Date, 118 | Timestamp, 119 | TimestampTz, 120 | 121 | Enum(String, Vec), 122 | } 123 | 124 | trait HasType { 125 | fn get_type(&self) -> Option; 126 | } 127 | 128 | impl HasType for Value { 129 | fn get_type(&self) -> Option { 130 | match self { 131 | Value::Nil => None, 132 | Value::Bool(_) => Some(SqlType::Bool), 133 | Value::Tinyint(_) => Some(SqlType::Tinyint), 134 | Value::Smallint(_) => Some(SqlType::Smallint), 135 | Value::Int(_) => Some(SqlType::Int), 136 | Value::Bigint(_) => Some(SqlType::Bigint), 137 | Value::Float(_) => Some(SqlType::Float), 138 | Value::Double(_) => Some(SqlType::Double), 139 | Value::BigDecimal(_) => Some(SqlType::Numeric), 140 | Value::Blob(_) 
=> Some(SqlType::Blob), 141 | Value::Char(_) => Some(SqlType::Char), 142 | Value::Text(_) => Some(SqlType::Text), 143 | Value::Json(_) => Some(SqlType::Json), 144 | Value::Uuid(_) => Some(SqlType::Uuid), 145 | Value::Date(_) => Some(SqlType::Date), 146 | Value::Time(_) => Some(SqlType::Time), 147 | Value::DateTime(_) => Some(SqlType::Timestamp), 148 | Value::Timestamp(_) => Some(SqlType::Timestamp), 149 | Value::Interval(_) => Some(SqlType::Interval), 150 | Value::Point(_) => Some(SqlType::Point), 151 | Value::Array(Array::Int(_)) => Some(SqlType::Array(Box::new(SqlType::Int))), 152 | Value::Array(Array::Float(_)) => Some(SqlType::Array(Box::new(SqlType::Float))), 153 | Value::Array(Array::Text(_)) => Some(SqlType::Array(Box::new(SqlType::Text))), 154 | } 155 | } 156 | } 157 | 158 | impl SqlType { 159 | pub fn same_type(&self, value: &Value) -> bool { 160 | if let Some(simple_type) = value.get_type() { 161 | if simple_type == *self { 162 | return true; 163 | } 164 | match (self, value) { 165 | (SqlType::Varchar, Value::Text(_)) => true, 166 | (SqlType::TimestampTz, Value::Timestamp(_)) => true, 167 | (_, _) => false, 168 | } 169 | } else { 170 | false 171 | } 172 | } 173 | } 174 | -------------------------------------------------------------------------------- /src/column.rs: -------------------------------------------------------------------------------- 1 | use crate::{types::SqlType, ColumnName, FromDao, TableName}; 2 | use uuid::Uuid; 3 | 4 | #[derive(Debug, PartialEq, Clone)] 5 | pub struct ColumnDef { 6 | pub table: TableName, 7 | pub name: ColumnName, 8 | pub comment: Option, 9 | pub specification: ColumnSpecification, 10 | pub stat: Option, 11 | } 12 | 13 | impl ColumnDef { 14 | /// check all the column constraint if any has AutoIncrement 15 | pub fn is_autoincrement(&self) -> bool { 16 | self.autoincrement_sequence_name().is_some() 17 | } 18 | 19 | /// get the sequnce name of this autoincrement column 20 | pub fn autoincrement_sequence_name(&self) -> 
Option<&String> { 21 | self.specification 22 | .constraints 23 | .iter() 24 | .find_map(|c| match &c { 25 | ColumnConstraint::AutoIncrement(sequence_name) => sequence_name.as_ref(), 26 | _ => None, 27 | }) 28 | } 29 | 30 | /// check if any of the column constraint default is generated from uuid 31 | pub fn default_is_generated_uuid(&self) -> bool { 32 | self.specification.constraints.iter().any(|c| match *c { 33 | ColumnConstraint::DefaultValue(ref literal) => match *literal { 34 | Literal::UuidGenerateV4 => true, 35 | _ => false, 36 | }, 37 | _ => false, 38 | }) 39 | } 40 | 41 | pub fn is_not_null(&self) -> bool { 42 | self.specification.constraints.iter().any(|c| match *c { 43 | ColumnConstraint::NotNull => true, 44 | _ => false, 45 | }) 46 | } 47 | 48 | pub fn get_sql_type(&self) -> SqlType { 49 | self.specification.sql_type.clone() 50 | } 51 | 52 | pub fn cast_as(&self) -> Option { 53 | self.get_sql_type().cast_as() 54 | } 55 | 56 | pub fn has_generated_default(&self) -> bool { 57 | self.specification.constraints.iter().any(|c| match *c { 58 | ColumnConstraint::DefaultValue(ref literal) => match *literal { 59 | Literal::Bool(_) => true, 60 | Literal::Null => false, 61 | Literal::Integer(_) => true, 62 | Literal::Double(_) => true, 63 | Literal::UuidGenerateV4 => true, 64 | Literal::Uuid(_) => true, 65 | Literal::String(_) => false, 66 | Literal::Blob(_) => false, 67 | Literal::CurrentTime => true, 68 | Literal::CurrentDate => true, 69 | Literal::CurrentTimestamp => true, 70 | Literal::ArrayInt(_) => false, 71 | Literal::ArrayFloat(_) => false, 72 | Literal::ArrayString(_) => false, 73 | }, 74 | _ => false, 75 | }) 76 | } 77 | } 78 | 79 | #[derive(Debug, PartialEq, Clone)] 80 | pub struct ColumnSpecification { 81 | pub sql_type: SqlType, 82 | pub capacity: Option, 83 | pub constraints: Vec, 84 | } 85 | 86 | impl ColumnSpecification { 87 | pub fn get_limit(&self) -> Option { 88 | match self.capacity { 89 | Some(ref capacity) => capacity.get_limit(), 90 | None => 
None, 91 | } 92 | } 93 | } 94 | 95 | #[derive(Debug, PartialEq, Clone)] 96 | pub enum Capacity { 97 | Limit(i32), 98 | Range(i32, i32), 99 | } 100 | 101 | impl Capacity { 102 | fn get_limit(&self) -> Option { 103 | match *self { 104 | Capacity::Limit(limit) => Some(limit), 105 | Capacity::Range(_whole, _decimal) => None, 106 | } 107 | } 108 | } 109 | 110 | #[derive(Debug, PartialEq, Clone)] 111 | pub enum ColumnConstraint { 112 | NotNull, 113 | DefaultValue(Literal), 114 | /// the string contains the sequence name of this serial column 115 | AutoIncrement(Option), 116 | } 117 | 118 | #[derive(Debug, PartialEq, Clone)] 119 | pub enum Literal { 120 | Bool(bool), 121 | Null, 122 | Integer(i64), 123 | Double(f64), 124 | UuidGenerateV4, // pg: uuid_generate_v4(); 125 | Uuid(Uuid), 126 | String(String), 127 | Blob(Vec), 128 | CurrentTime, // pg: now() 129 | CurrentDate, //pg: today() 130 | CurrentTimestamp, // pg: now() 131 | ArrayInt(Vec), 132 | ArrayFloat(Vec), 133 | ArrayString(Vec), 134 | } 135 | 136 | /// column stat, derive from pg_stats 137 | #[derive(Debug, PartialEq, FromDao, Clone)] 138 | pub struct ColumnStat { 139 | pub avg_width: i32, /* average width of the column, (the number of characters) */ 140 | //most_common_values: Value,//top 5 most common values 141 | pub n_distinct: f32, // the number of distinct values of these column 142 | } 143 | 144 | impl From for Literal { 145 | fn from(i: i64) -> Self { 146 | Literal::Integer(i) 147 | } 148 | } 149 | 150 | impl From for Literal { 151 | fn from(s: String) -> Self { 152 | Literal::String(s) 153 | } 154 | } 155 | 156 | impl<'a> From<&'a str> for Literal { 157 | fn from(s: &'a str) -> Self { 158 | Literal::String(String::from(s)) 159 | } 160 | } 161 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build 
Status](https://travis-ci.org/ivanceras/rustorm.svg?branch=master)](https://travis-ci.org/ivanceras/rustorm) 2 | 3 | # rustorm 4 | 5 | 6 | ### Rustorm 7 | 8 | [![Financial Contributors on Open Collective](https://opencollective.com/rustorm/all/badge.svg?label=financial+contributors)](https://opencollective.com/rustorm) [![Latest Version](https://img.shields.io/crates/v/rustorm.svg)](https://crates.io/crates/rustorm) 9 | [![Build Status](https://travis-ci.org/ivanceras/rustorm.svg?branch=master)](https://travis-ci.org/ivanceras/rustorm) 10 | [![MIT licensed](https://img.shields.io/badge/license-MIT-blue.svg)](./LICENSE) 11 | 12 | Rustorm is an SQL-centered ORM with focus on ease of use on conversion of database types to 13 | their appropriate rust type. 14 | 15 | Selecting records 16 | 17 | ```rust 18 | use rustorm::{ 19 | DbError, 20 | FromDao, 21 | Pool, 22 | ToColumnNames, 23 | ToTableName, 24 | }; 25 | 26 | #[derive(Debug, FromDao, ToColumnNames, ToTableName)] 27 | struct Actor { 28 | actor_id: i32, 29 | first_name: String, 30 | } 31 | 32 | #[cfg(any(feature="with-postgres", feature = "with-sqlite"))] 33 | fn main() { 34 | let mut pool = Pool::new(); 35 | #[cfg(feature = "with-sqlite")] 36 | let db_url = "sqlite://sakila.db"; 37 | #[cfg(feature = "with-postgres")] 38 | let db_url = "postgres://postgres:p0stgr3s@localhost/sakila"; 39 | let em = pool.em(db_url).unwrap(); 40 | let sql = "SELECT * FROM actor LIMIT 10"; 41 | let actors: Result, DbError> = 42 | em.execute_sql_with_return(sql, &[]); 43 | println!("Actor: {:#?}", actors); 44 | let actors = actors.unwrap(); 45 | assert_eq!(actors.len(), 10); 46 | for actor in actors { 47 | println!("actor: {:?}", actor); 48 | } 49 | } 50 | #[cfg(feature="with-mysql")] 51 | fn main() { 52 | println!("see examples for mysql usage, mysql has a little difference in the api"); 53 | } 54 | ``` 55 | Inserting and displaying the inserted records 56 | 57 | ```rust 58 | use chrono::{ 59 | offset::Utc, 60 | DateTime, 61 | 
NaiveDate, 62 | }; 63 | use rustorm::{ 64 | DbError, 65 | FromDao, 66 | Pool, 67 | TableName, 68 | ToColumnNames, 69 | ToDao, 70 | ToTableName, 71 | }; 72 | 73 | 74 | #[cfg(any(feature="with-postgres", feature = "with-sqlite"))] 75 | fn main() { 76 | mod for_insert { 77 | use super::*; 78 | #[derive(Debug, PartialEq, ToDao, ToColumnNames, ToTableName)] 79 | pub struct Actor { 80 | pub first_name: String, 81 | pub last_name: String, 82 | } 83 | } 84 | 85 | mod for_retrieve { 86 | use super::*; 87 | #[derive(Debug, FromDao, ToColumnNames, ToTableName)] 88 | pub struct Actor { 89 | pub actor_id: i32, 90 | pub first_name: String, 91 | pub last_name: String, 92 | pub last_update: DateTime, 93 | } 94 | } 95 | 96 | let mut pool = Pool::new(); 97 | #[cfg(feature = "with-sqlite")] 98 | let db_url = "sqlite://sakila.db"; 99 | #[cfg(feature = "with-postgres")] 100 | let db_url = "postgres://postgres:p0stgr3s@localhost/sakila"; 101 | let em = pool.em(db_url).unwrap(); 102 | let tom_cruise = for_insert::Actor { 103 | first_name: "TOM".into(), 104 | last_name: "CRUISE".to_string(), 105 | }; 106 | let tom_hanks = for_insert::Actor { 107 | first_name: "TOM".into(), 108 | last_name: "HANKS".to_string(), 109 | }; 110 | 111 | let actors: Result, DbError> = 112 | em.insert(&[&tom_cruise, &tom_hanks]); 113 | println!("Actor: {:#?}", actors); 114 | assert!(actors.is_ok()); 115 | let actors = actors.unwrap(); 116 | let today = Utc::now().date(); 117 | assert_eq!(tom_cruise.first_name, actors[0].first_name); 118 | assert_eq!(tom_cruise.last_name, actors[0].last_name); 119 | assert_eq!(today, actors[0].last_update.date()); 120 | 121 | assert_eq!(tom_hanks.first_name, actors[1].first_name); 122 | assert_eq!(tom_hanks.last_name, actors[1].last_name); 123 | assert_eq!(today, actors[1].last_update.date()); 124 | } 125 | #[cfg(feature="with-mysql")] 126 | fn main() { 127 | println!("see examples for mysql usage, mysql has a little difference in the api"); 128 | } 129 | ``` 130 | Rustorm is 
wholly used by [diwata](https://github.com/ivanceras/diwata) 131 | 132 | License: MIT 133 | 134 | ## Contributors 135 | 136 | ### Code Contributors 137 | 138 | This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. 139 | 140 | 141 | ### Financial Contributors 142 | 143 | Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/rustorm/contribute)] 144 | 145 | #### Individuals 146 | 147 | 148 | 149 | #### Organizations 150 | 151 | Support this project with your organization. Your logo will show up here with a link to your website. [[Contribute](https://opencollective.com/rustorm/contribute)] 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use cfg_if::cfg_if; 2 | use r2d2; 3 | use thiserror::Error; 4 | use url; 5 | 6 | cfg_if! {if #[cfg(feature = "with-postgres")]{ 7 | use crate::pg::PostgresError; 8 | }} 9 | 10 | cfg_if! {if #[cfg(feature = "with-sqlite")]{ 11 | use crate::sqlite::SqliteError; 12 | use rusqlite; 13 | }} 14 | 15 | cfg_if! 
{if #[cfg(feature = "with-mysql")]{ 16 | use crate::my::MysqlError; 17 | }} 18 | 19 | #[derive(Debug, Error)] 20 | pub enum ConnectError { 21 | #[error("No such pool connection")] 22 | NoSuchPoolConnection, 23 | #[error("{0}")] 24 | ParseError(#[from] ParseError), 25 | #[error("Database not supported: {0}")] 26 | UnsupportedDb(String), 27 | #[error("{0}")] 28 | R2d2Error(#[from] r2d2::Error), 29 | } 30 | 31 | #[derive(Debug, Error)] 32 | pub enum ParseError { 33 | #[error("Database url parse error: {0}")] 34 | DbUrlParseError(#[from] url::ParseError), 35 | } 36 | 37 | #[derive(Debug, Error)] 38 | #[error("{0}")] 39 | pub enum PlatformError { 40 | #[cfg(feature = "with-postgres")] 41 | #[error("{0}")] 42 | PostgresError(#[from] PostgresError), 43 | #[cfg(feature = "with-sqlite")] 44 | #[error("{0}")] 45 | SqliteError(#[from] SqliteError), 46 | #[cfg(feature = "with-mysql")] 47 | #[error("{0}")] 48 | MysqlError(#[from] MysqlError), 49 | } 50 | 51 | impl Into for PlatformError { 52 | /// attempt to convert platform specific error to DataOpeation error 53 | fn into(self) -> DataOpError { 54 | match self { 55 | #[cfg(feature = "with-postgres")] 56 | PlatformError::PostgresError(postgres_err) => match postgres_err { 57 | PostgresError::SqlError(ref pg_err, ref sql) => { 58 | if let Some(db_err) = pg_err.as_db() { 59 | use crate::TableName; 60 | let postgres::error::DbError { 61 | severity, 62 | code, 63 | message, 64 | detail, 65 | schema, 66 | table, 67 | column, 68 | datatype, 69 | constraint, 70 | .. 
71 | } = db_err; 72 | 73 | DataOpError::ConstraintError { 74 | severity: severity.clone(), 75 | code: code.code().to_string(), 76 | message: message.clone(), 77 | detail: detail.clone(), 78 | cause_table: if let Some(table) = table { 79 | Some( 80 | TableName { 81 | name: table.to_string(), 82 | schema: schema.clone(), 83 | alias: None, 84 | } 85 | .complete_name(), 86 | ) 87 | } else { 88 | None 89 | }, 90 | constraint: constraint.clone(), 91 | column: column.clone(), 92 | datatype: datatype.clone(), 93 | sql: sql.to_string(), 94 | } 95 | } else { 96 | DataOpError::GenericError { 97 | message: postgres_err.to_string(), 98 | sql: None, 99 | } 100 | } 101 | } 102 | _ => DataOpError::GenericError { 103 | message: postgres_err.to_string(), 104 | sql: None, 105 | }, 106 | }, 107 | #[cfg(feature = "with-sqlite")] 108 | PlatformError::SqliteError(e) => DataOpError::GenericError { 109 | message: e.to_string(), 110 | sql: None, 111 | }, 112 | #[cfg(feature = "with-mysql")] 113 | PlatformError::MysqlError(e) => DataOpError::GenericError { 114 | message: e.to_string(), 115 | sql: None, 116 | }, 117 | } 118 | } 119 | } 120 | 121 | //Note: this is needed coz there is 2 level of variant before we can convert postgres error to 122 | //platform error 123 | #[cfg(feature = "with-postgres")] 124 | impl From for DbError { 125 | fn from(e: PostgresError) -> Self { 126 | DbError::DataOpError(PlatformError::from(e).into()) 127 | } 128 | } 129 | 130 | #[cfg(feature = "with-sqlite")] 131 | impl From for DbError { 132 | fn from(e: rusqlite::Error) -> Self { 133 | DbError::DataOpError(PlatformError::SqliteError(SqliteError::from(e)).into()) 134 | } 135 | } 136 | 137 | #[cfg(feature = "with-sqlite")] 138 | impl From for DbError { 139 | fn from(e: SqliteError) -> Self { 140 | DbError::DataOpError(PlatformError::SqliteError(e.into()).into()) 141 | } 142 | } 143 | 144 | #[cfg(feature = "with-mysql")] 145 | impl From for DbError { 146 | fn from(e: MysqlError) -> Self { 147 | 
DbError::DataOpError(PlatformError::MysqlError(e.into()).into()) 148 | } 149 | } 150 | 151 | #[derive(Debug, Error)] 152 | pub enum DbError { 153 | #[error("Sql injection attempt error: {0}")] 154 | SqlInjectionAttempt(String), 155 | #[error("{0}")] 156 | DataError(#[from] DataError), 157 | #[error("{0}")] 158 | DataOpError(#[from] DataOpError), 159 | #[error("{0}")] 160 | ConvertError(#[from] ConvertError), 161 | #[error("{0}")] 162 | ConnectError(#[from] ConnectError), //agnostic connection error 163 | #[error("Unsupported operation: {0}")] 164 | UnsupportedOperation(String), 165 | } 166 | 167 | #[derive(Debug, Error)] 168 | pub enum DataOpError { 169 | /// The Data Delete Operation failed due record is still referenced from another table 170 | #[error("{constraint:?}, {cause_table:?}")] 171 | ConstraintError { 172 | severity: String, 173 | code: String, 174 | message: String, 175 | detail: Option, 176 | cause_table: Option, 177 | constraint: Option, 178 | column: Option, 179 | datatype: Option, 180 | sql: String, 181 | }, 182 | #[error("{message}")] 183 | GenericError { 184 | message: String, 185 | sql: Option, 186 | }, 187 | } 188 | 189 | #[derive(Debug, Error)] 190 | pub enum ConvertError { 191 | #[error("Unknown data type")] 192 | UnknownDataType, 193 | #[error("Unsupported data type {0}")] 194 | UnsupportedDataType(String), 195 | } 196 | 197 | #[derive(Debug, Error)] 198 | pub enum DataError { 199 | #[error("Zero record returned")] 200 | ZeroRecordReturned, 201 | #[error("More than one record returned")] 202 | MoreThan1RecordReturned, 203 | #[error("Table {0} not found")] 204 | TableNameNotFound(String), 205 | } 206 | -------------------------------------------------------------------------------- /src/common.rs: -------------------------------------------------------------------------------- 1 | use crate::{column::Capacity, types::SqlType, Value}; 2 | use bigdecimal::BigDecimal; 3 | use chrono::NaiveDateTime; 4 | use log::*; 5 | use 
num_traits::ToPrimitive; 6 | use std::str::FromStr; 7 | 8 | pub fn extract_datatype_with_capacity(data_type: &str) -> (String, Option) { 9 | let start = data_type.find('('); 10 | let end = data_type.find(')'); 11 | if let Some(start) = start { 12 | if let Some(end) = end { 13 | let dtype = &data_type[0..start]; 14 | let range = &data_type[start + 1..end]; 15 | let capacity = if range.contains(',') { 16 | let splinters = range.split(',').collect::>(); 17 | assert!(splinters.len() == 2, "There should only be 2 parts"); 18 | let range1: Result = splinters[0].parse(); 19 | let range2: Result = splinters[1].parse(); 20 | match range1 { 21 | Ok(r1) => match range2 { 22 | Ok(r2) => Some(Capacity::Range(r1, r2)), 23 | Err(e) => { 24 | info!( 25 | "error: {} when parsing range2 for data_type: {:?}", 26 | e, data_type 27 | ); 28 | None 29 | } 30 | }, 31 | Err(e) => { 32 | info!( 33 | "error: {} when parsing range1 for data_type: {:?}", 34 | e, data_type 35 | ); 36 | None 37 | } 38 | } 39 | } else { 40 | let limit: Result = range.parse(); 41 | match limit { 42 | Ok(limit) => Some(Capacity::Limit(limit)), 43 | Err(e) => { 44 | info!( 45 | "error: {} when parsing limit for data_type: {:?}", 46 | e, data_type 47 | ); 48 | None 49 | } 50 | } 51 | }; 52 | (dtype.to_owned(), capacity) 53 | } else { 54 | (data_type.to_owned(), None) 55 | } 56 | } else { 57 | (data_type.to_owned(), None) 58 | } 59 | } 60 | 61 | pub fn cast_type(value: &Value, required_type: &SqlType) -> Value { 62 | if *value == Value::Nil || required_type.same_type(value) { 63 | value.to_owned() 64 | } else { 65 | match *value { 66 | Value::Smallint(v) => match *required_type { 67 | SqlType::Tinyint => Value::Tinyint(v as i8), 68 | SqlType::Int => Value::Int(i32::from(v)), 69 | SqlType::Bigint => Value::Bigint(i64::from(v)), 70 | _ => panic!( 71 | "unsupported conversion from {:?} to {:?}", 72 | value, required_type 73 | ), 74 | }, 75 | Value::Int(v) => match *required_type { 76 | SqlType::Tinyint => 
Value::Tinyint(v as i8), 77 | SqlType::Smallint => Value::Smallint(v as i16), 78 | SqlType::Bigint => Value::Bigint(i64::from(v)), 79 | _ => panic!( 80 | "unsupported conversion from {:?} to {:?}", 81 | value, required_type 82 | ), 83 | }, 84 | Value::Bigint(v) => match *required_type { 85 | SqlType::Tinyint => Value::Tinyint(v as i8), 86 | SqlType::Smallint => Value::Smallint(v as i16), 87 | SqlType::Int => Value::Int(v as i32), 88 | SqlType::Numeric => { 89 | let bigdecimal = BigDecimal::from_str(&format!("{}", v)); 90 | assert!(bigdecimal.is_ok()); 91 | Value::BigDecimal(bigdecimal.unwrap()) 92 | } 93 | SqlType::Varchar => Value::Text(format!("{}", v)), 94 | _ => panic!( 95 | "unsupported conversion from {:?} to {:?}", 96 | value, required_type 97 | ), 98 | }, 99 | Value::BigDecimal(ref v) => match *required_type { 100 | SqlType::Int => { 101 | let ival = v.to_i32(); 102 | assert!(ival.is_some()); 103 | let ival = ival.unwrap(); 104 | Value::Int(ival) 105 | } 106 | SqlType::Varchar => Value::Text(format!("{}", v)), 107 | _ => panic!( 108 | "unsupported conversion from {:?} to {:?}", 109 | value, required_type 110 | ), 111 | }, 112 | Value::Text(ref v) => { 113 | match *required_type { 114 | SqlType::Timestamp => { 115 | let ts = NaiveDateTime::parse_from_str(&v, "%Y-%m-%d %H:%M:%S"); 116 | let ts = if let Ok(ts) = ts { 117 | ts 118 | } else { 119 | let ts = NaiveDateTime::parse_from_str(&v, "%Y-%m-%d %H:%M:%S%.3f"); 120 | if let Ok(ts) = ts { 121 | ts 122 | } else { 123 | panic!("unable to parse timestamp: {}", v); 124 | } 125 | }; 126 | Value::DateTime(ts) 127 | } 128 | SqlType::Char => { 129 | assert_eq!(v.len(), 1); 130 | Value::Char(v.chars().next().unwrap()) 131 | } 132 | SqlType::Int => { 133 | if let Ok(v) = v.parse::() { 134 | Value::Int(v) 135 | } else { 136 | panic!( 137 | "unsupported conversion from {:?} to {:?}", 138 | value, required_type 139 | ); 140 | } 141 | } 142 | // enums will be just Text 143 | SqlType::Enum(_, _) => 
Value::Text(v.to_string()), 144 | // ts vector is casted into text and then we just 145 | // return them as text as well 146 | SqlType::TsVector => Value::Text(v.to_string()), 147 | _ => panic!( 148 | "unsupported conversion from {:?} to {:?}", 149 | value, required_type 150 | ), 151 | } 152 | } 153 | Value::Char(v) => match *required_type { 154 | SqlType::Varchar => Value::Text(format!("{}", v)), 155 | _ => panic!( 156 | "unsupported conversion from {:?} to {:?}", 157 | value, required_type 158 | ), 159 | }, 160 | _ => panic!( 161 | "unsupported conversion from {:?} to {:?}", 162 | value, required_type 163 | ), 164 | } 165 | } 166 | } 167 | -------------------------------------------------------------------------------- /src/pg/numeric.rs: -------------------------------------------------------------------------------- 1 | /// 2 | /// Copied from diesel 3 | /// 4 | use byteorder::{NetworkEndian, ReadBytesExt, WriteBytesExt}; 5 | use std::error::Error; 6 | 7 | use postgres::types::{self, FromSql, IsNull, ToSql, Type}; 8 | 9 | use bigdecimal::BigDecimal; 10 | use num_bigint::{BigInt, BigUint, Sign}; 11 | 12 | use num_integer::Integer; 13 | use num_traits::{Signed, ToPrimitive, Zero}; 14 | 15 | #[derive(Debug, Clone, PartialEq, Eq)] 16 | pub enum PgNumeric { 17 | Positive { 18 | weight: i16, 19 | scale: u16, 20 | digits: Vec, 21 | }, 22 | Negative { 23 | weight: i16, 24 | scale: u16, 25 | digits: Vec, 26 | }, 27 | NaN, 28 | } 29 | 30 | #[derive(Debug, Clone, Copy)] 31 | struct InvalidNumericSign(u16); 32 | 33 | impl ::std::fmt::Display for InvalidNumericSign { 34 | fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { 35 | write!(f, "InvalidNumericSign({0:x})", self.0) 36 | } 37 | } 38 | 39 | impl Error for InvalidNumericSign { 40 | fn description(&self) -> &str { 41 | "sign for numeric field was not one of 0, 0x4000, 0xC000" 42 | } 43 | } 44 | 45 | impl FromSql for PgNumeric { 46 | fn from_sql(_ty: &Type, bytes: &[u8]) -> Result> { 47 | let mut bytes 
= <&[u8]>::clone(&bytes); 48 | let ndigits = bytes.read_u16::()?; 49 | let mut digits = Vec::with_capacity(ndigits as usize); 50 | let weight = bytes.read_i16::()?; 51 | let sign = bytes.read_u16::()?; 52 | let scale = bytes.read_u16::()?; 53 | for _ in 0..ndigits { 54 | digits.push(bytes.read_i16::()?); 55 | } 56 | 57 | match sign { 58 | 0 => Ok(PgNumeric::Positive { 59 | weight, 60 | scale, 61 | digits, 62 | }), 63 | 0x4000 => Ok(PgNumeric::Negative { 64 | weight, 65 | scale, 66 | digits, 67 | }), 68 | 0xC000 => Ok(PgNumeric::NaN), 69 | invalid => Err(Box::new(InvalidNumericSign(invalid))), 70 | } 71 | } 72 | 73 | fn accepts(ty: &Type) -> bool { 74 | match *ty { 75 | types::NUMERIC => true, 76 | _ => panic!("can not accept type {:?}", ty), 77 | } 78 | } 79 | } 80 | 81 | impl ToSql for PgNumeric { 82 | to_sql_checked!(); 83 | 84 | fn to_sql( 85 | &self, 86 | _ty: &Type, 87 | out: &mut Vec, 88 | ) -> Result> { 89 | let sign = match *self { 90 | PgNumeric::Positive { .. } => 0, 91 | PgNumeric::Negative { .. } => 0x4000, 92 | PgNumeric::NaN => 0xC000, 93 | }; 94 | let empty_vec = Vec::new(); 95 | let digits = match *self { 96 | PgNumeric::Positive { ref digits, .. } | PgNumeric::Negative { ref digits, .. } => { 97 | digits 98 | } 99 | PgNumeric::NaN => &empty_vec, 100 | }; 101 | let weight = match *self { 102 | PgNumeric::Positive { weight, .. } | PgNumeric::Negative { weight, .. } => weight, 103 | PgNumeric::NaN => 0, 104 | }; 105 | let scale = match *self { 106 | PgNumeric::Positive { scale, .. } | PgNumeric::Negative { scale, .. 
} => scale, 107 | PgNumeric::NaN => 0, 108 | }; 109 | out.write_u16::(digits.len() as u16)?; 110 | out.write_i16::(weight)?; 111 | out.write_u16::(sign)?; 112 | out.write_u16::(scale)?; 113 | for digit in digits.iter() { 114 | out.write_i16::(*digit)?; 115 | } 116 | 117 | Ok(IsNull::No) 118 | } 119 | 120 | fn accepts(ty: &Type) -> bool { 121 | match *ty { 122 | types::NUMERIC => true, 123 | _ => false, 124 | } 125 | } 126 | } 127 | 128 | /// Iterator over the digits of a big uint in base 10k. 129 | /// The digits will be returned in little endian order. 130 | struct ToBase10000(Option); 131 | 132 | impl Iterator for ToBase10000 { 133 | type Item = i16; 134 | 135 | fn next(&mut self) -> Option { 136 | self.0.take().map(|v| { 137 | let (div, rem) = v.div_rem(&BigUint::from(10_000u16)); 138 | if !div.is_zero() { 139 | self.0 = Some(div); 140 | } 141 | rem.to_i16().expect("10000 always fits in an i16") 142 | }) 143 | } 144 | } 145 | 146 | impl<'a> From<&'a BigDecimal> for PgNumeric { 147 | #[allow(clippy::redundant_closure)] 148 | fn from(decimal: &'a BigDecimal) -> Self { 149 | let (mut integer, scale) = decimal.as_bigint_and_exponent(); 150 | let scale = scale as u16; 151 | integer = integer.abs(); 152 | 153 | // Ensure that the decimal will always lie on a digit boundary 154 | for _ in 0..(4 - scale % 4) { 155 | integer *= 10; 156 | } 157 | let integer = integer.to_biguint().expect("integer is always positive"); 158 | 159 | let mut digits = ToBase10000(Some(integer)).collect::>(); 160 | digits.reverse(); 161 | let digits_after_decimal = scale as u16 / 4 + 1; 162 | let weight = digits.len() as i16 - digits_after_decimal as i16 - 1; 163 | 164 | let unnecessary_zeroes = if weight >= 0 { 165 | let index_of_decimal = (weight + 1) as usize; 166 | digits 167 | .get(index_of_decimal..) 
168 | .expect("enough digits exist") 169 | .iter() 170 | .rev() 171 | .take_while(|i| i.is_zero()) 172 | .count() 173 | } else { 174 | 0 175 | }; 176 | 177 | let relevant_digits = digits.len() - unnecessary_zeroes; 178 | digits.truncate(relevant_digits); 179 | 180 | match decimal.sign() { 181 | Sign::Plus => PgNumeric::Positive { 182 | digits, 183 | scale, 184 | weight, 185 | }, 186 | Sign::Minus => PgNumeric::Negative { 187 | digits, 188 | scale, 189 | weight, 190 | }, 191 | Sign::NoSign => PgNumeric::Positive { 192 | digits: vec![0], 193 | scale: 0, 194 | weight: 0, 195 | }, 196 | } 197 | } 198 | } 199 | 200 | impl From for PgNumeric { 201 | fn from(bigdecimal: BigDecimal) -> Self { 202 | (&bigdecimal).into() 203 | } 204 | } 205 | 206 | impl From for BigDecimal { 207 | fn from(numeric: PgNumeric) -> Self { 208 | let (sign, weight, _, digits) = match numeric { 209 | PgNumeric::Positive { 210 | weight, 211 | scale, 212 | digits, 213 | } => (Sign::Plus, weight, scale, digits), 214 | PgNumeric::Negative { 215 | weight, 216 | scale, 217 | digits, 218 | } => (Sign::Minus, weight, scale, digits), 219 | PgNumeric::NaN => panic!("NaN is not (yet) supported in BigDecimal"), 220 | }; 221 | let mut result = BigUint::default(); 222 | let count = digits.len() as i64; 223 | for digit in digits { 224 | result *= BigUint::from(10_000u64); 225 | result += BigUint::from(digit as u64); 226 | } 227 | // First digit got factor 10_000^(digits.len() - 1), but should get 10_000^weight 228 | let correction_exp = 4 * (i64::from(weight) - count + 1); 229 | // FIXME: `scale` allows to drop some insignificant figures, which is currently unimplemented. 230 | // This means that e.g. 
PostgreSQL 0.01 will be interpreted as 0.0100 231 | BigDecimal::new(BigInt::from_biguint(sign, result), -correction_exp) 232 | } 233 | } 234 | -------------------------------------------------------------------------------- /src/table.rs: -------------------------------------------------------------------------------- 1 | use crate::{types::SqlType, ColumnDef, ColumnName, TableName}; 2 | 3 | #[derive(Debug, PartialEq, Clone)] 4 | pub struct TableDef { 5 | pub name: TableName, 6 | 7 | /// comment of this table 8 | pub comment: Option, 9 | 10 | /// columns of this table 11 | pub columns: Vec, 12 | 13 | /// views can also be generated 14 | pub is_view: bool, 15 | 16 | pub table_key: Vec, 17 | } 18 | 19 | impl TableDef { 20 | pub fn complete_name(&self) -> String { 21 | self.name.complete_name() 22 | } 23 | 24 | pub fn safe_name(&self) -> String { 25 | self.name.safe_name() 26 | } 27 | 28 | pub fn safe_complete_name(&self) -> String { 29 | self.name.safe_complete_name() 30 | } 31 | 32 | pub fn get_primary_column_names(&self) -> Vec<&ColumnName> { 33 | let mut primary: Vec<&ColumnName> = vec![]; 34 | for key in &self.table_key { 35 | if let TableKey::PrimaryKey(ref pk) = key { 36 | for col in &pk.columns { 37 | primary.push(col) 38 | } 39 | } 40 | } 41 | primary.sort_by(|a, b| a.name.cmp(&b.name)); 42 | primary 43 | } 44 | 45 | pub fn get_non_primary_columns(&self) -> Vec<&ColumnDef> { 46 | let primary = self.get_primary_columns(); 47 | self.columns 48 | .iter() 49 | .filter(|c| !primary.contains(c)) 50 | .collect() 51 | } 52 | 53 | pub fn get_primary_columns(&self) -> Vec<&ColumnDef> { 54 | self.get_primary_column_names() 55 | .iter() 56 | .filter_map(|column_name| self.get_column(column_name)) 57 | .collect() 58 | } 59 | 60 | pub fn is_primary_column(&self, column: &ColumnDef) -> bool { 61 | self.get_primary_columns().contains(&column) 62 | } 63 | 64 | pub fn get_primary_column_types(&self) -> Vec<&SqlType> { 65 | self.get_primary_columns() 66 | .iter() 67 | 
.map(|column| &column.specification.sql_type) 68 | .collect() 69 | } 70 | 71 | /// return the foreignkyes of this table 72 | pub fn get_foreign_keys(&self) -> Vec<&ForeignKey> { 73 | let mut foreign: Vec<&ForeignKey> = vec![]; 74 | for key in &self.table_key { 75 | if let TableKey::ForeignKey(ref fk) = key { 76 | foreign.push(fk) 77 | } 78 | } 79 | foreign 80 | } 81 | 82 | /// return the table names which is foreign to this table 83 | pub fn get_foreign_tables(&self) -> Vec<&TableName> { 84 | self.get_foreign_keys() 85 | .iter() 86 | .map(|foreign| &foreign.foreign_table) 87 | .collect() 88 | } 89 | 90 | pub fn get_foreign_key_to_table(&self, table_name: &TableName) -> Option<&ForeignKey> { 91 | let foreign_keys: Vec<&ForeignKey> = self.get_foreign_keys(); 92 | for fk in foreign_keys { 93 | if fk.foreign_table == *table_name { 94 | return Some(fk); 95 | } 96 | } 97 | None 98 | } 99 | 100 | /// get the (local_columns, foreign_columns) to the table 101 | pub fn get_local_foreign_columns_pair_to_table( 102 | &self, 103 | table_name: &TableName, 104 | ) -> Vec<(&ColumnName, &ColumnName)> { 105 | let foreign_keys: Vec<&ForeignKey> = self.get_foreign_keys(); 106 | for fk in foreign_keys { 107 | if fk.foreign_table == *table_name { 108 | let mut container = vec![]; 109 | for (local_column, referred_column) in 110 | fk.columns.iter().zip(fk.referred_columns.iter()) 111 | { 112 | container.push((local_column, referred_column)); 113 | } 114 | return container; 115 | } 116 | } 117 | vec![] 118 | } 119 | 120 | fn get_foreign_columns_to_table(&self, table_name: &TableName) -> Vec<&ColumnDef> { 121 | self.get_foreign_column_names_to_table(table_name) 122 | .iter() 123 | .filter_map(|column_name| self.get_column(column_name)) 124 | .collect() 125 | } 126 | 127 | pub fn get_foreign_column_types_to_table(&self, table_name: &TableName) -> Vec<&SqlType> { 128 | self.get_foreign_columns_to_table(table_name) 129 | .iter() 130 | .map(|column| &column.specification.sql_type) 131 | 
.collect() 132 | } 133 | 134 | pub fn get_foreign_column_names_to_table(&self, table_name: &TableName) -> Vec<&ColumnName> { 135 | let mut foreign_columns = vec![]; 136 | let foreign_keys = self.get_foreign_key_to_table(table_name); 137 | for fk in &foreign_keys { 138 | for fk_column in &fk.columns { 139 | foreign_columns.push(fk_column); 140 | } 141 | } 142 | foreign_columns 143 | } 144 | 145 | /// get the column names of this table 146 | pub fn get_foreign_column_names(&self) -> Vec<&ColumnName> { 147 | let mut foreign_columns = vec![]; 148 | let foreign_keys = self.get_foreign_keys(); 149 | for fk in &foreign_keys { 150 | for fk_column in &fk.columns { 151 | foreign_columns.push(fk_column); 152 | } 153 | } 154 | foreign_columns 155 | } 156 | 157 | /// return the local columns of this table 158 | /// that is referred by the argument table name 159 | pub fn get_referred_columns_to_table( 160 | &self, 161 | table_name: &TableName, 162 | ) -> Option<&Vec> { 163 | let foreign_keys: Vec<&ForeignKey> = self.get_foreign_keys(); 164 | for fk in foreign_keys { 165 | if fk.foreign_table == *table_name { 166 | return Some(&fk.referred_columns); 167 | } 168 | } 169 | None 170 | } 171 | 172 | /// find the column which matches this `column_name` 173 | pub fn get_column(&self, column_name: &ColumnName) -> Option<&ColumnDef> { 174 | self.columns.iter().find(|c| c.name == *column_name) 175 | } 176 | } 177 | 178 | /// example: 179 | /// category { id, name } 180 | /// product { product_id, name, category_id } 181 | /// 182 | /// if the table in context is product and the foreign table is category 183 | /// ForeignKey{ 184 | /// name: product_category_fkey 185 | /// columns: _category_id_ 186 | /// foreign_table: category 187 | /// referred_columns: _id_ 188 | /// } 189 | #[derive(Debug, PartialEq, Clone)] 190 | pub struct ForeignKey { 191 | pub name: Option, 192 | // the local columns of this table local column = foreign_column 193 | pub columns: Vec, 194 | // referred foreign 
/// The kinds of key constraints a table can carry.
///
/// `PrimaryKey` is consumed by `TableDef::get_primary_column_names`;
/// `ForeignKey` by `TableDef::get_foreign_keys`.
#[derive(Debug, PartialEq, Clone)]
pub enum TableKey {
    // the table's primary key columns
    PrimaryKey(Key),
    // a UNIQUE constraint
    UniqueKey(Key),
    // a plain (non-unique) index/key
    Key(Key),
    // a reference to another table's columns
    ForeignKey(ForeignKey),
}
266 | let mut db = pool.db(db_url).unwrap(); 267 | assert!(em.is_ok()); 268 | let film_tablename = TableName::from("public.film"); 269 | let film = db 270 | .get_table(&film_tablename) 271 | .expect("must be ok") 272 | .expect("must have a table"); 273 | let film_actor_tablename = TableName::from("public.film_actor"); 274 | let film_actor = db 275 | .get_table(&film_actor_tablename) 276 | .expect("must be ok") 277 | .expect("must have a table"); 278 | let rc = film_actor.get_referred_columns_to_table(&film.name); 279 | info!("rc: {:#?}", rc); 280 | assert_eq!( 281 | rc, 282 | Some(&vec![ColumnName { 283 | name: "film_id".to_string(), 284 | table: None, 285 | alias: None, 286 | }]) 287 | ); 288 | } 289 | 290 | #[test] 291 | fn referred_columns_hero_id() { 292 | let db_url = "postgres://postgres:p0stgr3s@localhost:5432/dota"; 293 | let mut pool = Pool::new(); 294 | let mut em = pool.em(db_url).expect("must be ok"); 295 | let hero_tablename = TableName::from("public.hero"); 296 | let hero = em 297 | .get_table(&hero_tablename) 298 | .expect("must be ok") 299 | .expect("must have a table"); 300 | 301 | let hero_ability_tablename = TableName::from("public.hero_ability"); 302 | let hero_ability = em 303 | .get_table(&hero_ability_tablename) 304 | .expect("must be ok") 305 | .expect("must have a table"); 306 | 307 | info!("hero {:#?}", hero); 308 | info!("hero ability {:#?}", hero_ability); 309 | let rc = hero_ability.get_referred_columns_to_table(&hero.name); 310 | info!("rc: {:#?}", rc); 311 | assert_eq!( 312 | rc, 313 | Some(&vec![ColumnName { 314 | name: "id".to_string(), 315 | table: None, 316 | alias: None, 317 | }]) 318 | ); 319 | let foreign_key = hero_ability.get_foreign_key_to_table(&hero.name); 320 | info!("foreign_key: {:#?}", foreign_key); 321 | assert_eq!( 322 | foreign_key, 323 | Some(&ForeignKey { 324 | name: Some("hero_id_fkey".to_string()), 325 | columns: vec![ColumnName { 326 | name: "hero_id".to_string(), 327 | table: None, 328 | alias: None, 329 | }], 
330 | foreign_table: TableName { 331 | name: "hero".to_string(), 332 | schema: Some("public".to_string()), 333 | alias: None, 334 | }, 335 | referred_columns: vec![ColumnName { 336 | name: "id".to_string(), 337 | table: None, 338 | alias: None, 339 | }], 340 | }) 341 | ); 342 | } 343 | } 344 | -------------------------------------------------------------------------------- /crates/dao/src/value.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::cast_lossless)] 2 | use crate::{interval::Interval, ConvertError}; 3 | use bigdecimal::{BigDecimal, ToPrimitive}; 4 | use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc}; 5 | use geo::Point; 6 | use serde_derive::{Deserialize, Serialize}; 7 | use std::fmt; 8 | use uuid::Uuid; 9 | 10 | /// Generic value storage 32 byte in size 11 | /// Some contains the same value container, but the variant is more 12 | /// important for type hinting and view presentation hinting purposes 13 | #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] 14 | pub enum Value { 15 | Nil, // no value 16 | Bool(bool), 17 | 18 | Tinyint(i8), 19 | Smallint(i16), 20 | Int(i32), 21 | Bigint(i64), 22 | 23 | Float(f32), 24 | Double(f64), 25 | BigDecimal(BigDecimal), 26 | 27 | Blob(Vec), 28 | Char(char), 29 | Text(String), 30 | Json(String), 31 | 32 | Uuid(Uuid), 33 | Date(NaiveDate), 34 | Time(NaiveTime), 35 | DateTime(NaiveDateTime), 36 | Timestamp(DateTime), 37 | Interval(Interval), 38 | 39 | Point(Point), 40 | 41 | Array(Array), 42 | } 43 | 44 | impl Value { 45 | pub fn is_nil(&self) -> bool { 46 | *self == Value::Nil 47 | } 48 | } 49 | 50 | impl fmt::Display for Value { 51 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 52 | match self { 53 | Value::Nil => write!(f, ""), 54 | Value::Bool(v) => write!(f, "{}", v), 55 | Value::Tinyint(v) => write!(f, "{}", v), 56 | Value::Smallint(v) => write!(f, "{}", v), 57 | Value::Int(v) => write!(f, "{}", v), 58 | Value::Bigint(v) => 
write!(f, "{}", v), 59 | Value::Float(v) => write!(f, "{}", v), 60 | Value::Double(v) => write!(f, "{}", v), 61 | Value::BigDecimal(v) => write!(f, "{}", v), 62 | Value::Char(v) => write!(f, "{}", v), 63 | Value::Text(v) => write!(f, "{}", v), 64 | Value::Json(v) => write!(f, "{}", v), 65 | Value::Uuid(v) => write!(f, "{}", v), 66 | Value::Date(v) => write!(f, "{}", v), 67 | Value::Time(v) => write!(f, "{}", v), 68 | Value::DateTime(v) => write!(f, "{}", v.format("%Y-%m-%d %H:%M:%S").to_string()), 69 | Value::Timestamp(v) => write!(f, "{}", v.to_rfc3339()), 70 | Value::Array(array) => array.fmt(f), 71 | Value::Blob(v) => { 72 | let encoded = base64::encode_config(&v, base64::MIME); 73 | write!(f, "{}", encoded) 74 | } 75 | _ => panic!("not yet implemented: {:?}", self), 76 | } 77 | } 78 | } 79 | 80 | #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] 81 | pub enum Array { 82 | /* 83 | Bool(Vec), 84 | 85 | Tinyint(Vec), 86 | Smallint(Vec), 87 | */ 88 | Int(Vec), 89 | Float(Vec), 90 | /* 91 | Bigint(Vec), 92 | 93 | Double(Vec), 94 | BigDecimal(Vec), 95 | */ 96 | Text(Vec), 97 | /* 98 | Char(Vec), 99 | Uuid(Vec), 100 | Date(Vec), 101 | Timestamp(Vec>), 102 | */ 103 | } 104 | 105 | impl fmt::Display for Array { 106 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 107 | match self { 108 | Array::Text(texts) => { 109 | let json_arr = serde_json::to_string(texts).expect("must serialize"); 110 | write!(f, "{}", json_arr) 111 | } 112 | Array::Float(floats) => { 113 | let json_arr = serde_json::to_string(floats).expect("must serialize"); 114 | write!(f, "{}", json_arr) 115 | } 116 | _ => panic!("not yet implemented: {:?}", self), 117 | } 118 | } 119 | } 120 | 121 | /// A trait to allow passing of parameters ergonomically 122 | /// in em.execute_sql_with_return 123 | pub trait ToValue { 124 | fn to_value(&self) -> Value; 125 | } 126 | 127 | macro_rules! 
impl_to_value { 128 | ($ty:ty, $variant:ident) => { 129 | impl ToValue for $ty { 130 | fn to_value(&self) -> Value { 131 | Value::$variant(self.to_owned()) 132 | } 133 | } 134 | }; 135 | } 136 | 137 | impl_to_value!(bool, Bool); 138 | impl_to_value!(i8, Tinyint); 139 | impl_to_value!(i16, Smallint); 140 | impl_to_value!(i32, Int); 141 | impl_to_value!(i64, Bigint); 142 | impl_to_value!(f32, Float); 143 | impl_to_value!(f64, Double); 144 | impl_to_value!(Vec, Blob); 145 | impl_to_value!(char, Char); 146 | impl_to_value!(String, Text); 147 | impl_to_value!(Uuid, Uuid); 148 | impl_to_value!(NaiveDate, Date); 149 | impl_to_value!(NaiveTime, Time); 150 | impl_to_value!(DateTime, Timestamp); 151 | impl_to_value!(NaiveDateTime, DateTime); 152 | 153 | impl ToValue for &str { 154 | fn to_value(&self) -> Value { 155 | Value::Text(self.to_string()) 156 | } 157 | } 158 | 159 | impl ToValue for Vec { 160 | fn to_value(&self) -> Value { 161 | Value::Array(Array::Text(self.to_owned())) 162 | } 163 | } 164 | 165 | impl ToValue for Option 166 | where 167 | T: ToValue, 168 | { 169 | fn to_value(&self) -> Value { 170 | match self { 171 | Some(v) => v.to_value(), 172 | None => Value::Nil, 173 | } 174 | } 175 | } 176 | 177 | impl ToValue for &T 178 | where 179 | T: ToValue, 180 | { 181 | fn to_value(&self) -> Value { 182 | (*self).to_value() 183 | } 184 | } 185 | 186 | impl From for Value 187 | where 188 | T: ToValue, 189 | { 190 | fn from(v: T) -> Value { 191 | v.to_value() 192 | } 193 | } 194 | 195 | pub trait FromValue: Sized { 196 | fn from_value(v: &Value) -> Result; 197 | } 198 | 199 | macro_rules! 
/// Implement `FromValue` for a primitive numeric type.
///
/// `$variant`s are converted with a plain `as` cast; `BigDecimal` is
/// converted via the given `ToPrimitive` method (`$method`).
macro_rules! impl_from_value_numeric {
    ($ty: ty, $method:ident, $ty_name: tt, $($variant: ident),*) => {
        impl FromValue for $ty {
            fn from_value(v: &Value) -> Result<Self, ConvertError> {
                match *v {
                    $(Value::$variant(ref v) => Ok(v.to_owned() as $ty),
                    )*
                    // ToPrimitive returns None for out-of-range values; that
                    // used to `unwrap()` and panic — report it as a
                    // conversion error instead, matching the catch-all arm.
                    Value::BigDecimal(ref v) => v.$method().ok_or_else(|| {
                        ConvertError::NotSupported(format!("{:?}", v), $ty_name.into())
                    }),
                    _ => Err(ConvertError::NotSupported(format!("{:?}", v), $ty_name.into())),
                }
            }
        }
    }
}
Err(ConvertError::NotSupported( 255 | format!("{:?}", v), 256 | "String".to_string(), 257 | )), 258 | } 259 | } 260 | } 261 | 262 | impl FromValue for Vec { 263 | fn from_value(v: &Value) -> Result { 264 | match *v { 265 | Value::Array(Array::Text(ref t)) => Ok(t.to_owned()), 266 | _ => Err(ConvertError::NotSupported( 267 | format!("{:?}", v), 268 | "Vec".to_string(), 269 | )), 270 | } 271 | } 272 | } 273 | 274 | impl FromValue for bool { 275 | fn from_value(v: &Value) -> Result { 276 | match *v { 277 | Value::Bool(v) => Ok(v), 278 | Value::Tinyint(v) => Ok(v == 1), 279 | Value::Smallint(v) => Ok(v == 1), 280 | Value::Int(v) => Ok(v == 1), 281 | Value::Bigint(v) => Ok(v == 1), 282 | _ => Err(ConvertError::NotSupported( 283 | format!("{:?}", v), 284 | "bool".to_string(), 285 | )), 286 | } 287 | } 288 | } 289 | 290 | impl FromValue for DateTime { 291 | fn from_value(v: &Value) -> Result { 292 | match *v { 293 | Value::Text(ref v) => Ok(DateTime::::from_utc(parse_naive_date_time(v), Utc)), 294 | Value::DateTime(v) => Ok(DateTime::::from_utc(v, Utc)), 295 | Value::Timestamp(v) => Ok(v), 296 | _ => Err(ConvertError::NotSupported( 297 | format!("{:?}", v), 298 | "DateTime".to_string(), 299 | )), 300 | } 301 | } 302 | } 303 | 304 | impl FromValue for NaiveDateTime { 305 | fn from_value(v: &Value) -> Result { 306 | match *v { 307 | Value::Text(ref v) => Ok(parse_naive_date_time(v)), 308 | Value::DateTime(v) => Ok(v), 309 | _ => Err(ConvertError::NotSupported( 310 | format!("{:?}", v), 311 | "NaiveDateTime".to_string(), 312 | )), 313 | } 314 | } 315 | } 316 | 317 | impl FromValue for Option 318 | where 319 | T: FromValue, 320 | { 321 | fn from_value(v: &Value) -> Result { 322 | match *v { 323 | Value::Nil => Ok(None), 324 | _ => FromValue::from_value(v).map(Some), 325 | } 326 | } 327 | } 328 | 329 | fn parse_naive_date_time(v: &str) -> NaiveDateTime { 330 | let ts = NaiveDateTime::parse_from_str(&v, "%Y-%m-%d %H:%M:%S"); 331 | if let Ok(ts) = ts { 332 | ts 333 | } else { 
334 | let ts = NaiveDateTime::parse_from_str(&v, "%Y-%m-%d %H:%M:%S%.3f"); 335 | if let Ok(ts) = ts { 336 | ts 337 | } else { 338 | panic!("unable to parse timestamp: {}", v); 339 | } 340 | } 341 | } 342 | 343 | #[cfg(test)] 344 | mod tests { 345 | use super::*; 346 | use chrono::offset::Utc; 347 | use std::mem::size_of; 348 | 349 | #[test] 350 | fn data_sizes() { 351 | assert_eq!(48, size_of::()); // use to be 32, now 48 due to the addition of BigDecimal type 352 | assert_eq!(24, size_of::>()); 353 | assert_eq!(24, size_of::()); 354 | assert_eq!(12, size_of::>()); 355 | assert_eq!(4, size_of::()); 356 | assert_eq!(16, size_of::()); 357 | } 358 | 359 | #[test] 360 | fn test_types() { 361 | let _: Value = 127i8.to_value(); 362 | let _: Value = 2222i16.to_value(); 363 | let _: Value = 4444i32.to_value(); 364 | let _: Value = 10000i64.to_value(); 365 | let _v1: Value = 1.0f32.to_value(); 366 | let _v2: Value = 100.0f64.to_value(); 367 | let _v3: Value = Utc::now().to_value(); 368 | let _v7: Value = Utc::today().naive_utc().to_value(); 369 | let _v4: Value = "hello world!".to_value(); 370 | let _v5: Value = "hello world!".to_string().to_value(); 371 | let _v6: Value = vec![1u8, 2, 255, 3].to_value(); 372 | } 373 | 374 | #[test] 375 | fn naive_date_parse() { 376 | let v = "2018-01-29"; 377 | let ts = NaiveDate::parse_from_str(v, "%Y-%m-%d"); 378 | println!("{:?}", ts); 379 | assert!(ts.is_ok()); 380 | } 381 | 382 | #[test] 383 | fn naive_date_time_parse() { 384 | let v = "2018-01-29 09:58:20"; 385 | let ts = NaiveDateTime::parse_from_str(v, "%Y-%m-%d %H:%M:%S"); 386 | println!("{:?}", ts); 387 | assert!(ts.is_ok()); 388 | } 389 | 390 | #[test] 391 | fn date_time_conversion() { 392 | let v = "2018-01-29 09:58:20"; 393 | let ts = NaiveDateTime::parse_from_str(v, "%Y-%m-%d %H:%M:%S"); 394 | println!("{:?}", ts); 395 | assert!(ts.is_ok()); 396 | DateTime::::from_utc(ts.unwrap(), Utc); 397 | } 398 | } 399 | 
-------------------------------------------------------------------------------- /src/entity.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "db-auth")] 2 | use crate::db_auth::{Role, User}; 3 | use crate::{ 4 | table::SchemaContent, DBPlatform, DataError, Database, DatabaseName, DbError, TableDef, 5 | ToValue, Value, 6 | }; 7 | 8 | use rustorm_dao::{FromDao, TableName, ToColumnNames, ToDao, ToTableName}; 9 | 10 | pub struct EntityManager(pub DBPlatform); 11 | 12 | impl EntityManager { 13 | pub fn begin_transaction(&mut self) -> Result<(), DbError> { 14 | self.0.begin_transaction() 15 | } 16 | 17 | pub fn commit_transaction(&mut self) -> Result<(), DbError> { 18 | self.0.commit_transaction() 19 | } 20 | 21 | pub fn rollback_transaction(&mut self) -> Result<(), DbError> { 22 | self.0.rollback_transaction() 23 | } 24 | 25 | pub fn set_session_user(&mut self, username: &str) -> Result<(), DbError> { 26 | let sql = format!("SET SESSION ROLE '{}'", username); 27 | self.0.execute_sql_with_return(&sql, &[])?; 28 | Ok(()) 29 | } 30 | 31 | #[cfg(feature = "db-auth")] 32 | pub fn get_roles(&mut self, username: &str) -> Result, DbError> { 33 | self.0.get_roles(username) 34 | } 35 | 36 | #[cfg(feature = "db-auth")] 37 | pub fn get_users(&mut self) -> Result, DbError> { 38 | self.0.get_users() 39 | } 40 | 41 | #[cfg(feature = "db-auth")] 42 | pub fn get_user_detail(&mut self, username: &str) -> Result, DbError> { 43 | match self.0.get_user_detail(username) { 44 | Ok(mut result) => match result.len() { 45 | 0 => Ok(None), 46 | 1 => Ok(Some(result.remove(0))), 47 | _ => Err(DbError::DataError(DataError::MoreThan1RecordReturned)), 48 | }, 49 | Err(e) => Err(e), 50 | } 51 | } 52 | 53 | pub fn db(&mut self) -> &mut dyn Database { 54 | &mut *self.0 55 | } 56 | 57 | /// get all the records of this table 58 | pub fn get_all(&mut self) -> Result, DbError> 59 | where 60 | T: ToTableName + ToColumnNames + FromDao, 61 | { 62 | let 
table = T::to_table_name(); 63 | let columns = T::to_column_names(); 64 | let enumerated_columns = columns 65 | .iter() 66 | .map(|c| c.name.to_owned()) 67 | .collect::>() 68 | .join(", "); 69 | let sql = format!( 70 | "SELECT {} FROM {}", 71 | enumerated_columns, 72 | table.complete_name() 73 | ); 74 | let rows = self.0.execute_sql_with_return(&sql, &[])?; 75 | let mut entities = vec![]; 76 | for dao in rows.iter() { 77 | let entity = T::from_dao(&dao); 78 | entities.push(entity) 79 | } 80 | Ok(entities) 81 | } 82 | 83 | /// get the table from database based on this column name 84 | pub fn get_table(&mut self, table_name: &TableName) -> Result, DbError> { 85 | self.0.get_table(table_name) 86 | } 87 | 88 | /// set the autoincrement value of the primary column(if present) of this table. 89 | /// If the primary column of this table is not an autoincrement, returns Ok(None). 90 | pub fn set_autoincrement_value( 91 | &mut self, 92 | table_name: &TableName, 93 | sequence_value: i64, 94 | ) -> Result, DbError> { 95 | self.0.set_autoincrement_value(table_name, sequence_value) 96 | } 97 | 98 | pub fn get_autoincrement_last_value( 99 | &mut self, 100 | table_name: &TableName, 101 | ) -> Result, DbError> { 102 | self.0.get_autoincrement_last_value(table_name) 103 | } 104 | 105 | /// get all the user table and views from the database 106 | pub fn get_all_tables(&mut self) -> Result, DbError> { 107 | info!("EXPENSIVE DB OPERATION: get_all_tables"); 108 | self.0.get_all_tables() 109 | } 110 | 111 | /// get all the tablenames 112 | pub fn get_tablenames(&mut self) -> Result, DbError> { 113 | self.0.get_tablenames() 114 | } 115 | 116 | /// Get the total count of records 117 | pub fn get_total_records(&mut self, table_name: &TableName) -> Result { 118 | #[derive(crate::FromDao)] 119 | struct Count { 120 | count: i64, 121 | } 122 | let sql = format!( 123 | "SELECT COUNT(*) AS count FROM {}", 124 | table_name.complete_name() 125 | ); 126 | let count: Result = 
self.execute_sql_with_one_return(&sql, &[]); 127 | count.map(|c| c.count as usize) 128 | } 129 | 130 | pub fn get_database_name(&mut self) -> Result, DbError> { 131 | self.0.get_database_name() 132 | } 133 | 134 | /// get all table and views grouped per schema 135 | pub fn get_grouped_tables(&mut self) -> Result, DbError> { 136 | self.0.get_grouped_tables() 137 | } 138 | 139 | #[allow(unused_variables)] 140 | pub fn insert(&mut self, entities: &[&T]) -> Result, DbError> 141 | where 142 | T: ToTableName + ToColumnNames + ToDao, 143 | R: FromDao + ToColumnNames, 144 | { 145 | match self.0 { 146 | #[cfg(feature = "with-sqlite")] 147 | DBPlatform::Sqlite(_) => self.insert_simple(entities), 148 | #[cfg(feature = "with-postgres")] 149 | DBPlatform::Postgres(_) => self.insert_bulk_with_returning_support(entities), 150 | #[cfg(feature = "with-mysql")] 151 | DBPlatform::Mysql(_) => self.insert_simple(entities), 152 | } 153 | } 154 | 155 | /// called when the platform used is postgresql 156 | pub fn insert_bulk_with_returning_support( 157 | &mut self, 158 | entities: &[&T], 159 | ) -> Result, DbError> 160 | where 161 | T: ToTableName + ToColumnNames + ToDao, 162 | R: FromDao + ToColumnNames, 163 | { 164 | let columns = T::to_column_names(); 165 | let mut sql = self.build_insert_clause(entities); 166 | let return_columns = R::to_column_names(); 167 | sql += &self.build_returning_clause(return_columns); 168 | 169 | let mut values: Vec = Vec::with_capacity(entities.len() * columns.len()); 170 | for entity in entities { 171 | let dao = entity.to_dao(); 172 | for col in columns.iter() { 173 | let value = dao.get_value(&col.name); 174 | match value { 175 | Some(value) => values.push(value.clone()), 176 | None => values.push(Value::Nil), 177 | } 178 | } 179 | } 180 | let bvalues: Vec<&Value> = values.iter().collect(); 181 | let rows = self.0.execute_sql_with_return(&sql, &bvalues)?; 182 | let mut retrieved_entities = vec![]; 183 | for dao in rows.iter() { 184 | let retrieved = 
R::from_dao(&dao); 185 | retrieved_entities.push(retrieved); 186 | } 187 | Ok(retrieved_entities) 188 | } 189 | 190 | /// called multiple times when using database platform that doesn;t support multiple value 191 | /// insert such as sqlite 192 | pub fn single_insert(&mut self, entity: &T) -> Result<(), DbError> 193 | where 194 | T: ToTableName + ToColumnNames + ToDao, 195 | { 196 | let columns = T::to_column_names(); 197 | let sql = self.build_insert_clause(&[entity]); 198 | let dao = entity.to_dao(); 199 | let mut values: Vec = Vec::with_capacity(columns.len()); 200 | for col in columns.iter() { 201 | let value = dao.get_value(&col.name); 202 | match value { 203 | Some(value) => values.push(value.clone()), 204 | None => values.push(Value::Nil), 205 | } 206 | } 207 | let bvalues: Vec<&Value> = values.iter().collect(); 208 | self.0.execute_sql_with_return(&sql, &bvalues)?; 209 | Ok(()) 210 | } 211 | 212 | /// this is soly for use with sqlite since sqlite doesn't support bulk insert 213 | pub fn insert_simple(&mut self, entities: &[&T]) -> Result, DbError> 214 | where 215 | T: ToTableName + ToColumnNames + ToDao, 216 | R: FromDao + ToColumnNames, 217 | { 218 | let return_columns = R::to_column_names(); 219 | let return_column_names = return_columns 220 | .iter() 221 | .map(|rc| rc.name.to_owned()) 222 | .collect::>() 223 | .join(", "); 224 | 225 | let table = T::to_table_name(); 226 | //TODO: move this specific query to sqlite 227 | let last_insert_sql = format!( 228 | "\ 229 | SELECT {} \ 230 | FROM {} \ 231 | WHERE ROWID = (\ 232 | SELECT LAST_INSERT_ROWID() FROM {})", 233 | return_column_names, 234 | table.complete_name(), 235 | table.complete_name() 236 | ); 237 | let mut retrieved_entities = vec![]; 238 | println!("sql: {}", last_insert_sql); 239 | for entity in entities { 240 | self.single_insert(*entity)?; 241 | let retrieved = self.execute_sql_with_return(&last_insert_sql, &[])?; 242 | retrieved_entities.extend(retrieved); 243 | } 244 | 
Ok(retrieved_entities) 245 | } 246 | 247 | /// build the returning clause 248 | fn build_returning_clause(&self, return_columns: Vec) -> String { 249 | format!( 250 | "\nRETURNING \n{}", 251 | return_columns 252 | .iter() 253 | .map(|rc| rc.name.to_owned()) 254 | .collect::>() 255 | .join(", ") 256 | ) 257 | } 258 | 259 | /// build an insert clause 260 | fn build_insert_clause(&self, entities: &[&T]) -> String 261 | where 262 | T: ToTableName + ToColumnNames + ToDao, 263 | { 264 | let table = T::to_table_name(); 265 | let columns = T::to_column_names(); 266 | let columns_len = columns.len(); 267 | let mut sql = String::new(); 268 | sql += &format!("INSERT INTO {} ", table.complete_name()); 269 | sql += &format!( 270 | "({})\n", 271 | columns 272 | .iter() 273 | .map(|c| c.name.to_owned()) 274 | .collect::>() 275 | .join(", ") 276 | ); 277 | sql += "VALUES "; 278 | sql += &entities 279 | .iter() 280 | .enumerate() 281 | .map(|(y, _)| { 282 | format!( 283 | "\n\t({})", 284 | columns 285 | .iter() 286 | .enumerate() 287 | .map(|(x, _)| { 288 | #[allow(unreachable_patterns)] 289 | match self.0 { 290 | #[cfg(feature = "with-sqlite")] 291 | DBPlatform::Sqlite(_) => format!("${}", y * columns_len + x + 1), 292 | #[cfg(feature = "with-postgres")] 293 | DBPlatform::Postgres(_) => format!("${}", y * columns_len + x + 1), 294 | #[cfg(feature = "with-mysql")] 295 | DBPlatform::Mysql(_) => "?".to_string(), 296 | _ => format!("${}", y * columns_len + x + 1), 297 | } 298 | }) 299 | .collect::>() 300 | .join(", ") 301 | ) 302 | }) 303 | .collect::>() 304 | .join(", "); 305 | sql 306 | } 307 | 308 | #[allow(clippy::redundant_closure)] 309 | pub fn execute_sql_with_return<'a, R>( 310 | &mut self, 311 | sql: &str, 312 | params: &[&'a dyn ToValue], 313 | ) -> Result, DbError> 314 | where 315 | R: FromDao, 316 | { 317 | let values: Vec = params.iter().map(|p| p.to_value()).collect(); 318 | let bvalues: Vec<&Value> = values.iter().collect(); 319 | let rows = 
self.0.execute_sql_with_return(sql, &bvalues)?; 320 | Ok(rows.iter().map(|dao| R::from_dao(&dao)).collect::>()) 321 | } 322 | 323 | pub fn execute_sql_with_one_return<'a, R>( 324 | &mut self, 325 | sql: &str, 326 | params: &[&'a dyn ToValue], 327 | ) -> Result 328 | where 329 | R: FromDao, 330 | { 331 | let result: Result, DbError> = self.execute_sql_with_return(sql, ¶ms); 332 | match result { 333 | Ok(mut result) => match result.len() { 334 | 0 => Err(DbError::DataError(DataError::ZeroRecordReturned)), 335 | 1 => Ok(result.remove(0)), 336 | _ => Err(DbError::DataError(DataError::MoreThan1RecordReturned)), 337 | }, 338 | Err(e) => Err(e), 339 | } 340 | } 341 | 342 | pub fn execute_sql_with_maybe_one_return<'a, R>( 343 | &mut self, 344 | sql: &str, 345 | params: &[&'a dyn ToValue], 346 | ) -> Result, DbError> 347 | where 348 | R: FromDao, 349 | { 350 | let result: Result, DbError> = self.execute_sql_with_return(sql, ¶ms); 351 | match result { 352 | Ok(mut result) => match result.len() { 353 | 0 => Ok(None), 354 | 1 => Ok(Some(result.remove(0))), 355 | _ => Err(DbError::DataError(DataError::MoreThan1RecordReturned)), 356 | }, 357 | Err(e) => Err(e), 358 | } 359 | } 360 | } 361 | -------------------------------------------------------------------------------- /src/pool.rs: -------------------------------------------------------------------------------- 1 | use cfg_if::cfg_if; 2 | use log::*; 3 | #[cfg(any(feature = "with-postgres", feature = "with-sqlite"))] 4 | use r2d2; 5 | 6 | cfg_if! {if #[cfg(feature = "with-postgres")]{ 7 | use r2d2_postgres::PostgresConnectionManager; 8 | use crate::pg::{self, PostgresDB}; 9 | }} 10 | cfg_if! {if #[cfg(feature = "with-sqlite")]{ 11 | use r2d2_sqlite::SqliteConnectionManager; 12 | use crate::sqlite::{self, SqliteDB}; 13 | }} 14 | cfg_if! 
{if #[cfg(feature = "with-mysql")]{ 15 | use r2d2_mysql::MysqlConnectionManager; 16 | use crate::my::{self, MysqlDB}; 17 | }} 18 | 19 | use crate::{ 20 | error::{ConnectError, ParseError}, 21 | platform::Platform, 22 | DBPlatform, DaoManager, DbError, EntityManager, 23 | }; 24 | use std::{collections::BTreeMap, convert::TryFrom}; 25 | 26 | #[derive(Default)] 27 | pub struct Pool(BTreeMap); 28 | pub enum ConnPool { 29 | #[cfg(feature = "with-postgres")] 30 | PoolPg(r2d2::Pool), 31 | #[cfg(feature = "with-sqlite")] 32 | PoolSq(r2d2::Pool), 33 | #[cfg(feature = "with-mysql")] 34 | PoolMy(r2d2::Pool), 35 | } 36 | 37 | pub enum PooledConn { 38 | #[cfg(feature = "with-postgres")] 39 | PooledPg(Box>), 40 | #[cfg(feature = "with-sqlite")] 41 | PooledSq(Box>), 42 | #[cfg(feature = "with-mysql")] 43 | PooledMy(Box>), 44 | } 45 | 46 | impl Pool { 47 | pub fn new() -> Self { 48 | Default::default() 49 | } 50 | 51 | /// ensure that a connection pool for this db_url exist 52 | /// 53 | /// Note: if that db_url already has an equivalent connection pool, this doesn't do anything 54 | pub fn ensure(&mut self, db_url: &str) -> Result<(), DbError> { 55 | info!("ensure db_url: {}", db_url); 56 | let platform: Result = TryFrom::try_from(db_url); 57 | match platform { 58 | Ok(platform) => match platform { 59 | #[cfg(feature = "with-postgres")] 60 | Platform::Postgres => { 61 | if self.0.get(db_url).is_none() { 62 | let pool_pg = pg::init_pool(db_url)?; 63 | self.0.insert(db_url.to_string(), ConnPool::PoolPg(pool_pg)); 64 | } 65 | Ok(()) 66 | } 67 | #[cfg(feature = "with-sqlite")] 68 | Platform::Sqlite(path) => { 69 | info!("matched sqlite"); 70 | if self.0.get(db_url).is_none() { 71 | let pool_sq = sqlite::init_pool(&path)?; 72 | self.0.insert(db_url.to_string(), ConnPool::PoolSq(pool_sq)); 73 | } 74 | Ok(()) 75 | } 76 | #[cfg(feature = "with-mysql")] 77 | Platform::Mysql => { 78 | if self.0.get(db_url).is_none() { 79 | let pool_my = my::init_pool(db_url)?; 80 | 
self.0.insert(db_url.to_string(), ConnPool::PoolMy(pool_my)); 81 | } 82 | Ok(()) 83 | } 84 | Platform::Unsupported(scheme) => { 85 | info!("unsupported"); 86 | Err(DbError::ConnectError(ConnectError::UnsupportedDb(scheme))) 87 | } 88 | }, 89 | Err(e) => Err(DbError::ConnectError(ConnectError::ParseError(e))), 90 | } 91 | } 92 | 93 | /// get the pool for this specific db_url, create one if it doesn't have yet. 94 | fn get_pool(&mut self, db_url: &str) -> Result<&ConnPool, DbError> { 95 | self.ensure(db_url)?; 96 | let platform: Result = TryFrom::try_from(db_url); 97 | match platform { 98 | Ok(platform) => match platform { 99 | #[cfg(feature = "with-postgres")] 100 | Platform::Postgres => { 101 | let conn: Option<&ConnPool> = self.0.get(db_url); 102 | if let Some(conn) = conn { 103 | Ok(conn) 104 | } else { 105 | Err(DbError::ConnectError(ConnectError::NoSuchPoolConnection)) 106 | } 107 | } 108 | #[cfg(feature = "with-sqlite")] 109 | Platform::Sqlite(_path) => { 110 | info!("getting sqlite pool"); 111 | let conn: Option<&ConnPool> = self.0.get(db_url); 112 | if let Some(conn) = conn { 113 | Ok(conn) 114 | } else { 115 | Err(DbError::ConnectError(ConnectError::NoSuchPoolConnection)) 116 | } 117 | } 118 | #[cfg(feature = "with-mysql")] 119 | Platform::Mysql => { 120 | let conn: Option<&ConnPool> = self.0.get(db_url); 121 | if let Some(conn) = conn { 122 | Ok(conn) 123 | } else { 124 | Err(DbError::ConnectError(ConnectError::NoSuchPoolConnection)) 125 | } 126 | } 127 | 128 | Platform::Unsupported(scheme) => { 129 | Err(DbError::ConnectError(ConnectError::UnsupportedDb(scheme))) 130 | } 131 | }, 132 | Err(e) => Err(DbError::ConnectError(ConnectError::ParseError(e))), 133 | } 134 | } 135 | 136 | /// get a usable database connection from 137 | pub fn connect(&mut self, db_url: &str) -> Result { 138 | let pool = self.get_pool(db_url)?; 139 | match *pool { 140 | #[cfg(feature = "with-postgres")] 141 | ConnPool::PoolPg(ref pool_pg) => { 142 | let pooled_conn = pool_pg.get(); 
143 | match pooled_conn { 144 | Ok(pooled_conn) => Ok(PooledConn::PooledPg(Box::new(pooled_conn))), 145 | Err(e) => Err(DbError::ConnectError(ConnectError::R2d2Error(e))), 146 | } 147 | } 148 | #[cfg(feature = "with-sqlite")] 149 | ConnPool::PoolSq(ref pool_sq) => { 150 | let pooled_conn = pool_sq.get(); 151 | match pooled_conn { 152 | Ok(pooled_conn) => Ok(PooledConn::PooledSq(Box::new(pooled_conn))), 153 | Err(e) => Err(DbError::ConnectError(ConnectError::R2d2Error(e))), 154 | } 155 | } 156 | #[cfg(feature = "with-mysql")] 157 | ConnPool::PoolMy(ref pool_my) => { 158 | let pooled_conn = pool_my.get(); 159 | match pooled_conn { 160 | Ok(pooled_conn) => Ok(PooledConn::PooledMy(Box::new(pooled_conn))), 161 | Err(e) => Err(DbError::ConnectError(ConnectError::R2d2Error(e))), 162 | } 163 | } 164 | } 165 | } 166 | 167 | /// returns a dao manager which provides api which data is already converted into 168 | /// Dao, Rows and Value 169 | pub fn dm(&mut self, db_url: &str) -> Result { 170 | let db = self.db(db_url)?; 171 | Ok(DaoManager(db)) 172 | } 173 | 174 | /// get the pool for this specific db_url, create one if it doesn't have yet. 
175 | fn get_pool_mut(&mut self, db_url: &str) -> Result<&ConnPool, DbError> { 176 | self.ensure(db_url)?; 177 | let platform: Result = TryFrom::try_from(db_url); 178 | match platform { 179 | Ok(platform) => match platform { 180 | #[cfg(feature = "with-postgres")] 181 | Platform::Postgres => { 182 | let conn: Option<&ConnPool> = self.0.get(db_url); 183 | if let Some(conn) = conn { 184 | Ok(conn) 185 | } else { 186 | Err(DbError::ConnectError(ConnectError::NoSuchPoolConnection)) 187 | } 188 | } 189 | #[cfg(feature = "with-sqlite")] 190 | Platform::Sqlite(_path) => { 191 | info!("getting sqlite pool"); 192 | let conn: Option<&ConnPool> = self.0.get(db_url); 193 | if let Some(conn) = conn { 194 | Ok(conn) 195 | } else { 196 | Err(DbError::ConnectError(ConnectError::NoSuchPoolConnection)) 197 | } 198 | } 199 | #[cfg(feature = "with-mysql")] 200 | Platform::Mysql => { 201 | let conn: Option<&ConnPool> = self.0.get(db_url); 202 | if let Some(conn) = conn { 203 | Ok(conn) 204 | } else { 205 | Err(DbError::ConnectError(ConnectError::NoSuchPoolConnection)) 206 | } 207 | } 208 | 209 | Platform::Unsupported(scheme) => { 210 | Err(DbError::ConnectError(ConnectError::UnsupportedDb(scheme))) 211 | } 212 | }, 213 | Err(e) => Err(DbError::ConnectError(ConnectError::ParseError(e))), 214 | } 215 | } 216 | 217 | /// get a usable database connection from 218 | pub fn connect_mut(&mut self, db_url: &str) -> Result { 219 | let pool = self.get_pool_mut(db_url)?; 220 | match *pool { 221 | #[cfg(feature = "with-postgres")] 222 | ConnPool::PoolPg(ref pool_pg) => { 223 | let pooled_conn = pool_pg.get(); 224 | match pooled_conn { 225 | Ok(pooled_conn) => Ok(PooledConn::PooledPg(Box::new(pooled_conn))), 226 | Err(e) => Err(DbError::ConnectError(ConnectError::R2d2Error(e))), 227 | } 228 | } 229 | #[cfg(feature = "with-sqlite")] 230 | ConnPool::PoolSq(ref pool_sq) => { 231 | let pooled_conn = pool_sq.get(); 232 | match pooled_conn { 233 | Ok(pooled_conn) => 
Ok(PooledConn::PooledSq(Box::new(pooled_conn))), 234 | Err(e) => Err(DbError::ConnectError(ConnectError::R2d2Error(e))), 235 | } 236 | } 237 | #[cfg(feature = "with-mysql")] 238 | ConnPool::PoolMy(ref pool_my) => { 239 | let pooled_conn = pool_my.get(); 240 | match pooled_conn { 241 | Ok(pooled_conn) => Ok(PooledConn::PooledMy(Box::new(pooled_conn))), 242 | Err(e) => Err(DbError::ConnectError(ConnectError::R2d2Error(e))), 243 | } 244 | } 245 | } 246 | } 247 | 248 | /// get a database instance with a connection, ready to send sql statements 249 | pub fn db(&mut self, db_url: &str) -> Result { 250 | let pooled_conn = self.connect_mut(db_url)?; 251 | 252 | match pooled_conn { 253 | #[cfg(feature = "with-postgres")] 254 | PooledConn::PooledPg(pooled_pg) => { 255 | Ok(DBPlatform::Postgres(Box::new(PostgresDB(*pooled_pg)))) 256 | } 257 | #[cfg(feature = "with-sqlite")] 258 | PooledConn::PooledSq(pooled_sq) => { 259 | Ok(DBPlatform::Sqlite(Box::new(SqliteDB(*pooled_sq)))) 260 | } 261 | #[cfg(feature = "with-mysql")] 262 | PooledConn::PooledMy(pooled_my) => Ok(DBPlatform::Mysql(Box::new(MysqlDB(*pooled_my)))), 263 | } 264 | } 265 | 266 | /// return an entity manager which provides a higher level api 267 | pub fn em(&mut self, db_url: &str) -> Result { 268 | let db = self.db(db_url)?; 269 | Ok(EntityManager(db)) 270 | } 271 | } 272 | 273 | pub fn test_connection(db_url: &str) -> Result<(), DbError> { 274 | let platform: Result = TryFrom::try_from(db_url); 275 | match platform { 276 | Ok(platform) => match platform { 277 | #[cfg(feature = "with-postgres")] 278 | Platform::Postgres => { 279 | pg::test_connection(db_url)?; 280 | Ok(()) 281 | } 282 | #[cfg(feature = "with-sqlite")] 283 | Platform::Sqlite(path) => { 284 | info!("testing connection: {}", path); 285 | sqlite::test_connection(&path)?; 286 | Ok(()) 287 | } 288 | #[cfg(feature = "with-mysql")] 289 | Platform::Mysql => { 290 | my::test_connection(db_url)?; 291 | Ok(()) 292 | } 293 | Platform::Unsupported(scheme) => { 
294 | Err(DbError::ConnectError(ConnectError::UnsupportedDb(scheme))) 295 | } 296 | }, 297 | Err(e) => Err(DbError::ConnectError(ConnectError::ParseError(e))), 298 | } 299 | } 300 | 301 | #[cfg(test)] 302 | #[cfg(feature = "with-postgres")] 303 | mod tests_pg { 304 | use super::*; 305 | 306 | #[test] 307 | fn connect() { 308 | let db_url = "postgres://postgres:p0stgr3s@localhost:5432/sakila"; 309 | let mut pool = Pool::new(); 310 | pool.ensure(db_url).expect("Unable to initialize pool"); 311 | let pooled = pool.get_pool(db_url); 312 | match pooled { 313 | Ok(_) => info!("ok"), 314 | Err(ref e) => info!("error: {:?}", e), 315 | } 316 | assert!(pooled.is_ok()); 317 | } 318 | 319 | #[test] 320 | fn connect_no_ensure() { 321 | let db_url = "postgres://postgres:p0stgr3s@localhost:5432/sakila"; 322 | let mut pool = Pool::new(); 323 | assert!(pool.get_pool(db_url).is_ok()); 324 | } 325 | } 326 | -------------------------------------------------------------------------------- /src/my/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "db-auth")] 2 | use crate::db_auth::{Role, User}; 3 | use crate::{ 4 | column, common, table::SchemaContent, types::SqlType, ColumnDef, ColumnName, DataError, 5 | Database, DatabaseName, DbError, FromDao, TableDef, TableName, ToValue, Value, 6 | }; 7 | use r2d2::ManageConnection; 8 | use r2d2_mysql::{self, mysql}; 9 | use rustorm_dao::{FromDao, Rows}; 10 | use thiserror::Error; 11 | 12 | pub fn init_pool( 13 | db_url: &str, 14 | ) -> Result, MysqlError> { 15 | test_connection(db_url)?; 16 | let opts = mysql::Opts::from_url(&db_url)?; 17 | let builder = mysql::OptsBuilder::from_opts(opts); 18 | let manager = r2d2_mysql::MysqlConnectionManager::new(builder); 19 | let pool = r2d2::Pool::new(manager)?; 20 | Ok(pool) 21 | } 22 | 23 | pub fn test_connection(db_url: &str) -> Result<(), MysqlError> { 24 | let opts = mysql::Opts::from_url(&db_url)?; 25 | let builder = 
mysql::OptsBuilder::from_opts(opts); 26 | let manager = r2d2_mysql::MysqlConnectionManager::new(builder); 27 | let mut conn = manager.connect()?; 28 | manager.is_valid(&mut conn)?; 29 | Ok(()) 30 | } 31 | 32 | pub struct MysqlDB(pub r2d2::PooledConnection); 33 | 34 | impl Database for MysqlDB { 35 | fn begin_transaction(&mut self) -> Result<(), DbError> { 36 | self.execute_sql_with_return("START TRANSACTION", &[])?; 37 | Ok(()) 38 | } 39 | 40 | fn commit_transaction(&mut self) -> Result<(), DbError> { 41 | self.execute_sql_with_return("COMMIT", &[])?; 42 | Ok(()) 43 | } 44 | 45 | fn rollback_transaction(&mut self) -> Result<(), DbError> { 46 | self.execute_sql_with_return("ROLLBACK", &[])?; 47 | Ok(()) 48 | } 49 | 50 | fn execute_sql_with_return(&mut self, sql: &str, param: &[&Value]) -> Result { 51 | fn collect(mut rows: mysql::QueryResult) -> Result { 52 | let column_types: Vec<_> = rows.columns_ref().iter().map(|c| c.column_type()).collect(); 53 | 54 | let column_names = rows 55 | .columns_ref() 56 | .iter() 57 | .map(|c| std::str::from_utf8(c.name_ref()).map(ToString::to_string)) 58 | .collect::, _>>() 59 | .map_err(MysqlError::Utf8Error)?; 60 | 61 | let mut records = Rows::new(column_names); 62 | while rows.more_results_exists() { 63 | for r in rows.by_ref() { 64 | records.push(into_record(r.map_err(MysqlError::from)?, &column_types)?); 65 | } 66 | } 67 | 68 | Ok(records) 69 | } 70 | 71 | if param.is_empty() { 72 | let rows = self 73 | .0 74 | .query(&sql) 75 | .map_err(|e| MysqlError::SqlError(e, sql.to_string()))?; 76 | 77 | collect(rows) 78 | } else { 79 | let mut stmt = self 80 | .0 81 | .prepare(&sql) 82 | .map_err(|e| MysqlError::SqlError(e, sql.to_string()))?; 83 | 84 | let params: mysql::Params = param 85 | .iter() 86 | .map(|v| MyValue(v)) 87 | .map(|v| mysql::prelude::ToValue::to_value(&v)) 88 | .collect::>() 89 | .into(); 90 | 91 | let rows = stmt 92 | .execute(&params) 93 | .map_err(|e| MysqlError::SqlError(e,
sql.to_string()))?; 94 | 95 | collect(rows) 96 | } 97 | } 98 | 99 | fn get_table(&mut self, table_name: &TableName) -> Result, DbError> { 100 | #[derive(Debug, FromDao)] 101 | struct TableSpec { 102 | schema: String, 103 | name: String, 104 | comment: String, 105 | is_view: i32, 106 | } 107 | 108 | let schema = table_name 109 | .schema 110 | .as_ref() 111 | .map(String::as_str) 112 | .unwrap_or("__DUMMY__") 113 | .into(); 114 | let table_name = &table_name.name.clone().into(); 115 | 116 | let mut tables: Vec = self 117 | .execute_sql_with_return( 118 | r#" 119 | SELECT TABLE_SCHEMA AS `schema`, 120 | TABLE_NAME AS name, 121 | TABLE_COMMENT AS comment, 122 | CASE TABLE_TYPE WHEN 'VIEW' THEN TRUE ELSE FALSE END AS is_view 123 | FROM INFORMATION_SCHEMA.TABLES 124 | WHERE TABLE_SCHEMA = CASE ? WHEN '__DUMMY__' THEN DATABASE() ELSE ? END AND TABLE_NAME = ?"#, 125 | &[ 126 | &schema, &schema, 127 | &table_name, 128 | ], 129 | )? 130 | .iter() 131 | .map(|dao| FromDao::from_dao(&dao)) 132 | .collect(); 133 | 134 | let table_spec = match tables.len() { 135 | 0 => return Err(DbError::DataError(DataError::ZeroRecordReturned)), 136 | _ => tables.remove(0), 137 | }; 138 | 139 | #[derive(Debug, FromDao)] 140 | struct ColumnSpec { 141 | schema: String, 142 | table_name: String, 143 | name: String, 144 | comment: String, 145 | type_: String, 146 | } 147 | 148 | let columns: Vec = self 149 | .execute_sql_with_return( 150 | r#" 151 | SELECT TABLE_SCHEMA AS `schema`, 152 | TABLE_NAME AS table_name, 153 | COLUMN_NAME AS name, 154 | COLUMN_COMMENT AS comment, 155 | CAST(COLUMN_TYPE as CHAR(255)) AS type_ 156 | FROM INFORMATION_SCHEMA.COLUMNS 157 | WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?"#, 158 | &[&table_spec.schema.clone().into(), &table_name], 159 | )? 
160 | .iter() 161 | .map(|dao| FromDao::from_dao(&dao)) 162 | .map(|spec: ColumnSpec| { 163 | let (sql_type, capacity) = 164 | if spec.type_.starts_with("enum(") || spec.type_.starts_with("set(") { 165 | let start = spec.type_.find('('); 166 | let end = spec.type_.find(')'); 167 | if let (Some(start), Some(end)) = (start, end) { 168 | let dtype = &spec.type_[0..start]; 170 | let range = &spec.type_[start + 1..end]; 171 | let choices = range 172 | .split(',') 173 | .map(|v| v.to_owned()) 174 | .collect::>(); 175 | 176 | match dtype { 177 | "enum" => (SqlType::Enum(dtype.to_owned(), choices), None), 178 | "set" => (SqlType::Enum(dtype.to_owned(), choices), None), 179 | _ => panic!("not yet handled: {}", dtype), 180 | } 181 | } else { 182 | panic!("not yet handled spec_type: {:?}", spec.type_) 183 | } 184 | } else { 185 | let (dtype, capacity) = common::extract_datatype_with_capacity(&spec.type_); 186 | let sql_type = match &*dtype { 187 | "tinyint" | "tinyint unsigned" => SqlType::Tinyint, 188 | "smallint" | "smallint unsigned" | "year" => SqlType::Smallint, 189 | "mediumint" | "mediumint unsigned" => SqlType::Int, 190 | "int" | "int unsigned" => SqlType::Int, 191 | "bigint" | "bigint unsigned" => SqlType::Bigint, 192 | "float" | "float unsigned" => SqlType::Float, 193 | "double" | "double unsigned" => SqlType::Double, 194 | "decimal" => SqlType::Numeric, 195 | "tinyblob" => SqlType::Tinyblob, 196 | "mediumblob" => SqlType::Mediumblob, 197 | "blob" => SqlType::Blob, 198 | "longblob" => SqlType::Longblob, 199 | "binary" | "varbinary" => SqlType::Varbinary, 200 | "char" => SqlType::Char, 201 | "varchar" => SqlType::Varchar, 202 | "tinytext" => SqlType::Tinytext, 203 | "mediumtext" => SqlType::Mediumtext, 204 | "text" | "longtext" => SqlType::Text, 205 | "date" => SqlType::Date, 206 | "datetime" | "timestamp" => SqlType::Timestamp, 207 | "time" => SqlType::Time, 208 | _ => panic!("not yet handled: {}", dtype), 209 | }; 210 | 211 |
(sql_type, capacity) 212 | }; 213 | 214 | ColumnDef { 215 | table: TableName::from(&format!("{}.{}", spec.schema, spec.table_name)), 216 | name: ColumnName::from(&spec.name), 217 | comment: Some(spec.comment), 218 | specification: column::ColumnSpecification { 219 | capacity, 220 | // TODO: implementation 221 | constraints: vec![], 222 | sql_type, 223 | }, 224 | stat: None, 225 | } 226 | }) 227 | .collect(); 228 | 229 | Ok(Some(TableDef { 230 | name: TableName { 231 | name: table_spec.name, 232 | schema: Some(table_spec.schema), 233 | alias: None, 234 | }, 235 | comment: Some(table_spec.comment), 236 | columns, 237 | is_view: table_spec.is_view == 1, 238 | // TODO: implementation 239 | table_key: vec![], 240 | })) 241 | } 242 | 243 | fn get_tablenames(&mut self) -> Result, DbError> { 244 | #[derive(Debug, FromDao)] 245 | struct TableNameSimple { 246 | table_name: String, 247 | } 248 | let sql = 249 | "SELECT TABLE_NAME as table_name FROM information_schema.tables WHERE TABLE_SCHEMA = database()"; 250 | 254 | let result: Vec = self 255 | .execute_sql_with_return(sql, &[])?
256 | .iter() 257 | .map(|row| TableNameSimple { 258 | table_name: row.get("table_name").expect("must have a table name"), 259 | }) 260 | .collect(); 261 | let tablenames = result 262 | .iter() 263 | .map(|r| TableName::from(&r.table_name)) 264 | .collect(); 265 | Ok(tablenames) 266 | } 267 | 268 | fn get_all_tables(&mut self) -> Result, DbError> { 269 | let tablenames = self.get_tablenames()?; 270 | Ok(tablenames 271 | .iter() 272 | .filter_map(|tablename| self.get_table(tablename).ok().flatten()) 273 | .collect()) 274 | } 275 | 276 | fn get_grouped_tables(&mut self) -> Result, DbError> { 277 | let table_names = get_table_names(&mut *self, "BASE TABLE")?; 278 | let view_names = get_table_names(&mut *self, "VIEW")?; 279 | let schema_content = SchemaContent { 280 | schema: "".to_string(), 281 | tablenames: table_names, 282 | views: view_names, 283 | }; 284 | Ok(vec![schema_content]) 285 | } 286 | 287 | fn get_database_name(&mut self) -> Result, DbError> { 288 | let sql = "SELECT database() AS name"; 289 | let mut database_names: Vec> = 290 | self.execute_sql_with_return(sql, &[]).map(|rows| { 291 | rows.iter() 292 | .map(|row| { 293 | row.get_opt("name") 294 | .expect("must not error") 295 | .map(|name| DatabaseName { 296 | name, 297 | description: None, 298 | }) 299 | }) 300 | .collect() 301 | })?; 302 | 303 | if !database_names.is_empty() { 304 | Ok(database_names.remove(0)) 305 | } else { 306 | Ok(None) 307 | } 308 | } 309 | 310 | #[cfg(feature = "db-auth")] 311 | fn get_users(&mut self) -> Result, DbError> { 312 | let sql = "SELECT USER as username FROM information_schema.user_attributes"; 313 | let rows: Result = self.execute_sql_with_return(sql, &[]); 314 | 315 | rows.map(|rows| { 316 | rows.iter() 317 | .map(|row| User { 318 | //FIXME; this should be option 319 | sysid: 0, 320 | username: row.get("username").expect("username"), 321 | //TODO: join to the user_privileges tables 322 | is_superuser: false, 323 | is_inherit: false, 324 |
can_create_db: false, 325 | can_create_role: false, 326 | can_login: false, 327 | can_do_replication: false, 328 | can_bypass_rls: false, 329 | valid_until: None, 330 | conn_limit: None, 331 | }) 332 | .collect() 333 | }) 334 | } 335 | 336 | #[cfg(feature = "db-auth")] 337 | fn get_user_detail(&mut self, _username: &str) -> Result, DbError> { 338 | todo!() 339 | } 340 | 341 | #[cfg(feature = "db-auth")] 342 | fn get_roles(&mut self, _username: &str) -> Result, DbError> { 343 | todo!() 344 | } 345 | 346 | fn set_autoincrement_value( 347 | &mut self, 348 | _table_name: &TableName, 349 | _sequence_value: i64, 350 | ) -> Result, DbError> { 351 | todo!() 352 | } 353 | 354 | fn get_autoincrement_last_value( 355 | &mut self, 356 | _table_name: &TableName, 357 | ) -> Result, DbError> { 358 | todo!() 359 | } 360 | } 361 | 362 | fn get_table_names(db: &mut dyn Database, kind: &str) -> Result, DbError> { 363 | #[derive(Debug, FromDao)] 364 | struct TableNameSimple { 365 | table_name: String, 366 | } 367 | let sql = "SELECT TABLE_NAME as table_name FROM information_schema.tables WHERE table_type= ?"; 368 | let result: Vec = db 369 | .execute_sql_with_return(sql, &[&kind.to_value()])? 
370 | .iter() 371 | .map(|row| TableNameSimple { 372 | table_name: row.get("table_name").expect("must have a table name"), 373 | }) 374 | .collect(); 375 | let mut table_names = vec![]; 376 | for r in result { 377 | let table_name = TableName::from(&r.table_name); 378 | table_names.push(table_name); 379 | } 380 | Ok(table_names) 381 | } 382 | 383 | #[derive(Debug)] 384 | pub struct MyValue<'a>(&'a Value); 385 | 386 | impl mysql::prelude::ToValue for MyValue<'_> { 387 | fn to_value(&self) -> mysql::Value { 388 | match self.0 { 389 | Value::Bool(ref v) => v.into(), 390 | Value::Tinyint(ref v) => v.into(), 391 | Value::Smallint(ref v) => v.into(), 392 | Value::Int(ref v) => v.into(), 393 | Value::Bigint(ref v) => v.into(), 394 | Value::Float(ref v) => v.into(), 395 | Value::Double(ref v) => v.into(), 396 | Value::Blob(ref v) => v.into(), 397 | Value::Char(ref v) => v.to_string().into(), 398 | Value::Text(ref v) => v.into(), 399 | Value::Uuid(ref v) => v.as_bytes().into(), 400 | Value::Date(ref v) => v.into(), 401 | Value::Timestamp(ref v) => v.naive_utc().into(), 402 | Value::DateTime(ref v) => v.into(), 403 | Value::Time(ref v) => v.into(), 404 | Value::Interval(ref _v) => panic!("storing interval in DB is not supported"), 405 | Value::Json(ref v) => v.into(), 406 | Value::Nil => mysql::Value::NULL, 407 | Value::BigDecimal(_) => unimplemented!("we need to upgrade bigdecimal crate"), 408 | Value::Point(_) | Value::Array(_) => unimplemented!("unsupported type"), 409 | } 410 | } 411 | } 412 | 413 | fn into_record( 414 | mut row: mysql::Row, 415 | column_types: &[mysql::consts::ColumnType], 416 | ) -> Result, MysqlError> { 417 | use mysql::{consts::ColumnType, from_value_opt as fvo}; 418 | 419 | column_types 420 | .iter() 421 | .enumerate() 422 | .map(|(i, column_type)| { 423 | let cell: mysql::Value = row 424 | .take_opt(i) 425 | .unwrap_or_else(|| unreachable!("column length does not enough")) 426 | .unwrap_or_else(|_| unreachable!("could not convert as 
`mysql::Value`")); 427 | 428 | if cell == mysql::Value::NULL { 429 | return Ok(Value::Nil); 430 | } 431 | 432 | match column_type { 433 | ColumnType::MYSQL_TYPE_DECIMAL | ColumnType::MYSQL_TYPE_NEWDECIMAL => fvo(cell) 434 | .and_then(|v: Vec| { 435 | bigdecimal::BigDecimal::parse_bytes(&v, 10) 436 | .ok_or(mysql::FromValueError(mysql::Value::Bytes(v))) 437 | }) 438 | .map(Value::BigDecimal), 439 | ColumnType::MYSQL_TYPE_TINY => fvo(cell).map(Value::Tinyint), 440 | ColumnType::MYSQL_TYPE_SHORT | ColumnType::MYSQL_TYPE_YEAR => { 441 | fvo(cell).map(Value::Smallint) 442 | } 443 | ColumnType::MYSQL_TYPE_LONG | ColumnType::MYSQL_TYPE_INT24 => { 444 | fvo(cell).map(Value::Int) 445 | } 446 | ColumnType::MYSQL_TYPE_LONGLONG => fvo(cell).map(Value::Bigint), 447 | ColumnType::MYSQL_TYPE_FLOAT => fvo(cell).map(Value::Float), 448 | ColumnType::MYSQL_TYPE_DOUBLE => fvo(cell).map(Value::Double), 449 | ColumnType::MYSQL_TYPE_NULL => fvo(cell).map(|_: mysql::Value| Value::Nil), 450 | ColumnType::MYSQL_TYPE_TIMESTAMP => fvo(cell).map(|v: chrono::NaiveDateTime| { 451 | Value::Timestamp(chrono::DateTime::from_utc(v, chrono::Utc)) 452 | }), 453 | ColumnType::MYSQL_TYPE_DATE | ColumnType::MYSQL_TYPE_NEWDATE => { 454 | fvo(cell).map(Value::Date) 455 | } 456 | ColumnType::MYSQL_TYPE_TIME => fvo(cell).map(Value::Time), 457 | ColumnType::MYSQL_TYPE_DATETIME => fvo(cell).map(Value::DateTime), 458 | ColumnType::MYSQL_TYPE_VARCHAR 459 | | ColumnType::MYSQL_TYPE_VAR_STRING 460 | | ColumnType::MYSQL_TYPE_STRING => fvo(cell).map(Value::Text), 461 | ColumnType::MYSQL_TYPE_JSON => fvo(cell).map(Value::Json), 462 | ColumnType::MYSQL_TYPE_TINY_BLOB 463 | | ColumnType::MYSQL_TYPE_MEDIUM_BLOB 464 | | ColumnType::MYSQL_TYPE_LONG_BLOB 465 | | ColumnType::MYSQL_TYPE_BLOB => fvo(cell).map(Value::Blob), 466 | ColumnType::MYSQL_TYPE_TIMESTAMP2 467 | | ColumnType::MYSQL_TYPE_DATETIME2 468 | | ColumnType::MYSQL_TYPE_TIME2 => { 469 | panic!("only used in server side: {:?}", column_type) 470 | } 471 | 
ColumnType::MYSQL_TYPE_BIT 472 | | ColumnType::MYSQL_TYPE_ENUM 473 | | ColumnType::MYSQL_TYPE_SET 474 | | ColumnType::MYSQL_TYPE_GEOMETRY => { 475 | panic!("not yet handling this kind: {:?}", column_type) 476 | } 477 | } 478 | .map_err(MysqlError::from) 479 | }) 480 | .collect() 481 | } 482 | 483 | #[derive(Debug, Error)] 484 | pub enum MysqlError { 485 | #[error("{0}")] 486 | UrlError(#[from] mysql::UrlError), 487 | #[error("Error executing {1}: {0}")] 488 | SqlError(mysql::Error, String), 489 | #[error("{0}")] 490 | Utf8Error(#[from] std::str::Utf8Error), 491 | #[error("{0}")] 492 | ConvertError(#[from] mysql::FromValueError), 493 | #[error("Pool initialization error: {0}")] 494 | PoolInitializationError(#[from] r2d2::Error), 495 | } 496 | 497 | impl From for MysqlError { 498 | fn from(e: mysql::Error) -> Self { 499 | MysqlError::SqlError(e, "Generic Error".into()) 500 | } 501 | } 502 | -------------------------------------------------------------------------------- /src/pg/table_info.rs: -------------------------------------------------------------------------------- 1 | //! module table_info extract the table meta data using SQL queries on pg_catalog. 2 | //! This is not using information_schema since there is a performance issue with it. 
3 | use crate::{ 4 | pg::column_info, 5 | table::{self, ForeignKey, Key, SchemaContent, TableDef, TableKey}, 6 | ColumnDef, ColumnName, DataError, Database, DbError, FromDao, TableName, Value, 7 | }; 8 | use log::*; 9 | use rustorm_dao::value::ToValue; 10 | 11 | pub fn get_tablenames(db: &mut dyn Database) -> Result, DbError> { 12 | #[derive(Debug, FromDao)] 13 | struct TableNameSimple { 14 | name: String, 15 | schema: String, 16 | } 17 | impl TableNameSimple { 18 | fn to_tablename(&self) -> TableName { 19 | TableName { 20 | name: self.name.to_string(), 21 | schema: Some(self.schema.to_string()), 22 | alias: None, 23 | } 24 | } 25 | } 26 | let sql = r#"SELECT 27 | pg_class.relname AS name, 28 | pg_namespace.nspname AS schema 29 | FROM pg_class 30 | LEFT JOIN pg_namespace 31 | ON pg_namespace.oid = pg_class.relnamespace 32 | WHERE 33 | pg_class.relkind IN ('r','v') 34 | AND pg_namespace.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast') 35 | AND (has_table_privilege(pg_class.oid, 'SELECT') 36 | OR has_any_column_privilege(pg_class.oid, 'SELECT') 37 | ) 38 | ORDER BY nspname, relname 39 | "#; 40 | 41 | let rows = db.execute_sql_with_return(sql, &[]); 42 | let tablenames_simple: Result, DbError> = rows.map(|rows| { 43 | rows.iter() 44 | .map(|row| { 45 | let name: String = row.get("name").expect("must have table name"); 46 | let schema: String = row.get("schema").expect("must have schema"); 47 | TableNameSimple { name, schema } 48 | }) 49 | .collect() 50 | }); 51 | match tablenames_simple { 52 | Ok(simples) => { 53 | let table_names = simples.iter().map(|simple| simple.to_tablename()).collect(); 54 | Ok(table_names) 55 | } 56 | Err(e) => Err(e), 57 | } 58 | } 59 | 60 | /// get all database tables and views except from special schema 61 | pub fn get_all_tables(db: &mut dyn Database) -> Result, DbError> { 62 | let tablenames = get_tablenames(db)?; 63 | Ok(tablenames 64 | .iter() 65 | .filter_map(|tablename| get_table(db, tablename).ok().flatten()) 66 | 
.collect()) 67 | } 68 | 69 | enum TableKind { 70 | TableDef, 71 | View, 72 | } 73 | impl TableKind { 74 | fn to_sql_char(&self) -> char { 75 | match *self { 76 | TableKind::TableDef => 'r', 77 | TableKind::View => 'v', 78 | } 79 | } 80 | } 81 | 82 | /// get all database tables or views from this schema 83 | fn get_schema_tables( 84 | db: &mut dyn Database, 85 | schema: &str, 86 | kind: &TableKind, 87 | ) -> Result, DbError> { 88 | #[derive(Debug, FromDao)] 89 | struct TableNameSimple { 90 | name: String, 91 | schema: String, 92 | } 93 | impl TableNameSimple { 94 | fn to_tablename(&self) -> TableName { 95 | TableName { 96 | name: self.name.to_string(), 97 | schema: Some(self.schema.to_string()), 98 | alias: None, 99 | } 100 | } 101 | } 102 | let sql = r#"SELECT 103 | pg_class.relname AS name, 104 | pg_namespace.nspname AS schema 105 | FROM pg_class 106 | LEFT JOIN pg_namespace 107 | ON pg_namespace.oid = pg_class.relnamespace 108 | WHERE 109 | pg_class.relkind = $2::char 110 | AND pg_namespace.nspname = $1 111 | ORDER BY relname 112 | "#; 113 | let tablenames_simple: Result, DbError> = db 114 | .execute_sql_with_return( 115 | sql, 116 | &[ 117 | &Value::Text(schema.to_string()), 118 | &Value::Char(kind.to_sql_char()), 119 | ], 120 | ) 121 | .map(|rows| { 122 | rows.iter() 123 | .map(|row| { 124 | let name: String = row.get("name").expect("must have table name"); 125 | let schema: String = row.get("schema").expect("must have schema"); 126 | TableNameSimple { name, schema } 127 | }) 128 | .collect() 129 | }); 130 | match tablenames_simple { 131 | Ok(simples) => { 132 | let mut table_names = Vec::with_capacity(simples.len()); 133 | for simple in simples { 134 | table_names.push(simple.to_tablename()); 135 | } 136 | Ok(table_names) 137 | } 138 | Err(e) => Err(e), 139 | } 140 | } 141 | 142 | /// get all user created schema 143 | /// special tables such as: information_schema, pg_catalog, pg_toast, pg_temp_1, pg_toast_temp_1, 144 | /// etc. 
are excluded 145 | fn get_schemas(db: &mut dyn Database) -> Result, DbError> { 146 | let sql = r#"SELECT 147 | pg_namespace.nspname AS schema 148 | FROM pg_namespace 149 | WHERE 150 | pg_namespace.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast') 151 | AND pg_namespace.nspname NOT LIKE 'pg_temp_%' 152 | AND pg_namespace.nspname NOT LIKE 'pg_toast_temp_%' 153 | ORDER BY nspname 154 | "#; 155 | db.execute_sql_with_return(sql, &[]).map(|rows| { 156 | rows.iter() 157 | .map(|row| row.get("schema").expect("must have schema")) 158 | .collect() 159 | }) 160 | } 161 | 162 | /// get the table and views of this database organized per schema 163 | pub fn get_organized_tables(db: &mut dyn Database) -> Result, DbError> { 164 | let schemas = get_schemas(db); 165 | match schemas { 166 | Ok(schemas) => { 167 | let mut contents = Vec::with_capacity(schemas.len()); 168 | for schema in schemas { 169 | let tables = get_schema_tables(db, &schema, &TableKind::TableDef)?; 170 | let views = get_schema_tables(db, &schema, &TableKind::View)?; 171 | info!("views: {:#?}", views); 172 | contents.push(SchemaContent { 173 | schema: schema.to_string(), 174 | tablenames: tables, 175 | views, 176 | }); 177 | } 178 | Ok(contents) 179 | } 180 | Err(e) => Err(e), 181 | } 182 | } 183 | 184 | /// get the table definition, its columns and table_keys 185 | pub fn get_table( 186 | db: &mut dyn Database, 187 | table_name: &TableName, 188 | ) -> Result, DbError> { 189 | #[derive(Debug, FromDao)] 190 | struct TableSimple { 191 | name: String, 192 | schema: String, 193 | comment: Option, 194 | is_view: bool, 195 | } 196 | 197 | impl TableSimple { 198 | fn to_table(&self, columns: Vec, table_key: Vec) -> TableDef { 199 | TableDef { 200 | name: TableName { 201 | name: self.name.to_string(), 202 | schema: Some(self.schema.to_string()), 203 | alias: None, 204 | }, 205 | comment: self.comment.clone(), 206 | columns, 207 | is_view: self.is_view, 208 | table_key, 209 | } 210 | } 211 | } 212 | 213 | let 
sql = r#"SELECT pg_class.relname as name, 214 | pg_namespace.nspname as schema, 215 | CASE WHEN pg_class.relkind = 'v' THEN true ELSE false 216 | END AS is_view, 217 | obj_description(pg_class.oid) as comment 218 | FROM pg_class 219 | LEFT JOIN pg_namespace 220 | ON pg_namespace.oid = pg_class.relnamespace 221 | WHERE pg_class.relname = $1 222 | AND pg_namespace.nspname = $2 223 | "#; 224 | 225 | let schema = match table_name.schema { 226 | Some(ref schema) => schema.to_string(), 227 | None => "public".to_string(), 228 | }; 229 | 230 | let mut table_simples: Vec = db 231 | .execute_sql_with_return( 232 | &sql, 233 | &[ 234 | &Value::Text(table_name.name.to_string()), 235 | &Value::Text(schema), 236 | ], 237 | ) 238 | .map(|rows| { 239 | rows.iter() 240 | .map(|row| TableSimple { 241 | name: row.get("name").expect("must have a table name"), 242 | schema: row.get("schema").expect("must have a schema"), 243 | comment: row.get_opt("comment").expect("must not error"), 244 | is_view: row.get("is_view").expect("must have is_view"), 245 | }) 246 | .collect() 247 | })?; 248 | 249 | match table_simples.len() { 250 | 0 => Ok(None), 251 | 1 => { 252 | let table_simple = table_simples.remove(0); 253 | let columns: Vec = column_info::get_columns(db, table_name)?; 254 | let keys: Vec = get_table_key(db, table_name)?; 255 | let table: TableDef = table_simple.to_table(columns, keys); 256 | Ok(Some(table)) 257 | } 258 | _ => Err(DbError::DataError(DataError::MoreThan1RecordReturned)), 259 | } 260 | } 261 | 262 | /// column name only 263 | #[derive(Debug, FromDao)] 264 | struct ColumnNameSimple { 265 | column: String, 266 | } 267 | impl ColumnNameSimple { 268 | fn to_columnname(&self) -> ColumnName { 269 | ColumnName { 270 | name: self.column.to_string(), 271 | table: None, 272 | alias: None, 273 | } 274 | } 275 | } 276 | 277 | /// get the column names involved in a Primary key or unique key 278 | fn get_columnname_from_key( 279 | db: &mut dyn Database, 280 | key_name: &str, 281 | 
table_name: &TableName, 282 | ) -> Result, DbError> { 283 | let sql = r#"SELECT pg_attribute.attname as column 284 | FROM pg_attribute 285 | JOIN pg_class 286 | ON pg_class.oid = pg_attribute.attrelid 287 | LEFT JOIN pg_namespace 288 | ON pg_namespace.oid = pg_class.relnamespace 289 | LEFT JOIN pg_constraint 290 | ON pg_constraint.conrelid = pg_class.oid 291 | AND pg_attribute.attnum = ANY (pg_constraint.conkey) 292 | WHERE pg_namespace.nspname = $3 293 | AND pg_class.relname = $2 294 | AND pg_attribute.attnum > 0 295 | AND pg_constraint.conname = $1 296 | "#; 297 | let schema = match table_name.schema { 298 | Some(ref schema) => schema.to_string(), 299 | None => "public".to_string(), 300 | }; 301 | 302 | let column_name_simple: Result, DbError> = db 303 | .execute_sql_with_return( 304 | &sql, 305 | &[ 306 | &key_name.to_value(), 307 | &table_name.name.to_value(), 308 | &schema.to_value(), 309 | ], 310 | ) 311 | .map(|rows| { 312 | rows.iter() 313 | .map(|row| ColumnNameSimple { 314 | column: row.get("column").expect("a column"), 315 | }) 316 | .collect() 317 | }); 318 | match column_name_simple { 319 | Ok(column_name_simple) => { 320 | let mut column_names = vec![]; 321 | for simple in column_name_simple { 322 | let column_name = simple.to_columnname(); 323 | column_names.push(column_name); 324 | } 325 | Ok(column_names) 326 | } 327 | Err(e) => Err(e), 328 | } 329 | } 330 | 331 | /// get the Primary keys, Unique keys of this table 332 | fn get_table_key(db: &mut dyn Database, table_name: &TableName) -> Result, DbError> { 333 | #[derive(Debug, FromDao)] 334 | struct TableKeySimple { 335 | key_name: String, 336 | is_primary_key: bool, 337 | is_unique_key: bool, 338 | is_foreign_key: bool, 339 | } 340 | 341 | impl TableKeySimple { 342 | fn to_table_key(&self, db: &mut dyn Database, table_name: &TableName) -> TableKey { 343 | if self.is_primary_key { 344 | let primary = Key { 345 | name: Some(self.key_name.to_owned()), 346 | columns: get_columnname_from_key(db, 
&self.key_name, table_name).unwrap(),
                };
                TableKey::PrimaryKey(primary)
            } else if self.is_unique_key {
                let unique = Key {
                    name: Some(self.key_name.to_owned()),
                    columns: get_columnname_from_key(db, &self.key_name, table_name).unwrap(),
                };
                TableKey::UniqueKey(unique)
            } else if self.is_foreign_key {
                let foreign_key = get_foreign_key(db, &self.key_name, table_name).unwrap();
                TableKey::ForeignKey(foreign_key)
            } else {
                // not 'p', 'u', or 'f': treat as a plain key
                let key = table::Key {
                    name: Some(self.key_name.to_owned()),
                    columns: get_columnname_from_key(db, &self.key_name, table_name).unwrap(),
                };
                TableKey::Key(key)
            }
        }
    }

    // contype: 'p' = primary key, 'u' = unique, 'f' = foreign key
    let sql = r#"SELECT conname AS key_name,
        CASE WHEN contype = 'p' THEN true ELSE false END AS is_primary_key,
        CASE WHEN contype = 'u' THEN true ELSE false END AS is_unique_key,
        CASE WHEN contype = 'f' THEN true ELSE false END AS is_foreign_key
        FROM pg_constraint
        LEFT JOIN pg_class
            ON pg_class.oid = pg_constraint.conrelid
        LEFT JOIN pg_namespace
            ON pg_namespace.oid = pg_class.relnamespace
        LEFT JOIN pg_class AS g
            ON pg_constraint.confrelid = g.oid
        WHERE pg_class.relname = $1
            AND pg_namespace.nspname = $2
        ORDER BY is_primary_key DESC, is_unique_key DESC, is_foreign_key DESC
        "#;

    // Default to "public" when the TableName has no explicit schema.
    let schema = match table_name.schema {
        Some(ref schema) => schema.to_string(),
        None => "public".to_string(),
    };

    // NOTE(review): element type reconstructed as TableKeySimple from the
    // row-mapping closure below (the generic was stripped in the source).
    let table_key_simple: Result<Vec<TableKeySimple>, DbError> = db
        .execute_sql_with_return(&sql, &[&table_name.name.to_value(), &schema.to_value()])
        .map(|rows| {
            rows.iter()
                .map(|row| TableKeySimple {
                    key_name: row.get("key_name").expect("a key_name"),
                    is_primary_key: row.get("is_primary_key").expect("is_primary_key"),
                    is_unique_key: row.get("is_unique_key").expect("is_unique_key"),
                    is_foreign_key:
row.get("is_foreign_key").expect("is_foreign_key"),
                })
                .collect()
        });
    match table_key_simple {
        Ok(table_key_simple) => {
            let mut table_keys = vec![];
            for simple in table_key_simple {
                let table_key = simple.to_table_key(db, table_name);
                table_keys.push(table_key);
            }
            Ok(table_keys)
        }
        Err(e) => Err(e),
    }
}

/// get the foreign key detail of this key name
fn get_foreign_key(
    db: &mut dyn Database,
    foreign_key: &str,
    table_name: &TableName,
) -> Result<ForeignKey, DbError> {
    /// one row of the referenced-table query for a foreign-key constraint
    #[derive(Debug, FromDao)]
    struct ForeignKeySimple {
        key_name: String,
        foreign_table: String,
        foreign_schema: Option<String>,
    }
    impl ForeignKeySimple {
        /// Assemble the full `ForeignKey` from the referenced table plus the
        /// local and referred column lists gathered separately.
        fn to_foreign_key(
            &self,
            columns: Vec<ColumnName>,
            referred_columns: Vec<ColumnName>,
        ) -> ForeignKey {
            ForeignKey {
                name: Some(self.key_name.to_string()),
                columns,
                foreign_table: TableName {
                    name: self.foreign_table.to_string(),
                    schema: self.foreign_schema.clone(),
                    alias: None,
                },
                referred_columns,
            }
        }
    }
    // confrelid points at the referenced (foreign) table's pg_class entry.
    let sql = r#"SELECT DISTINCT conname AS key_name,
        pg_class.relname AS foreign_table,
        (SELECT pg_namespace.nspname FROM pg_namespace WHERE pg_namespace.oid = pg_class.relnamespace) AS foreign_schema
        FROM pg_constraint
        LEFT JOIN pg_class
            ON pg_constraint.confrelid = pg_class.oid
        WHERE pg_constraint.conname = $1
        "#;

    // NOTE(review): element type reconstructed as ForeignKeySimple from the
    // row-mapping closure below (the generic was stripped in the source).
    let foreign_key_simple: Result<Vec<ForeignKeySimple>, DbError> = db
        .execute_sql_with_return(&sql, &[&foreign_key.to_value()])
        .map(|rows| {
            rows.iter()
                .map(|row| ForeignKeySimple {
                    key_name: row.get("key_name").expect("key_name"),
                    foreign_table: row.get("foreign_table").expect("foreign_table"),
                    foreign_schema: row.get_opt("foreign_schema").expect("foreign_schema"),
                })
                .collect()
        });
    match foreign_key_simple {
Ok(mut simple) => { 466 | assert_eq!(simple.len(), 1); 467 | let simple = simple.remove(0); 468 | let columns: Vec = get_columnname_from_key(db, foreign_key, table_name)?; 469 | let referred_columns: Vec = get_referred_foreign_columns(db, foreign_key)?; 470 | let foreign = simple.to_foreign_key(columns, referred_columns); 471 | Ok(foreign) 472 | } 473 | Err(e) => Err(e), 474 | } 475 | } 476 | 477 | fn get_referred_foreign_columns( 478 | db: &mut dyn Database, 479 | foreign_key: &str, 480 | ) -> Result, DbError> { 481 | let sql = r#"SELECT DISTINCT conname AS key_name, 482 | pg_attribute.attname AS column 483 | FROM pg_constraint 484 | LEFT JOIN pg_class 485 | ON pg_constraint.confrelid = pg_class.oid 486 | LEFT JOIN pg_attribute 487 | ON pg_attribute.attnum = ANY (pg_constraint.confkey) 488 | AND pg_class.oid = pg_attribute.attrelid 489 | WHERE pg_constraint.conname = $1 490 | "#; 491 | 492 | let foreign_columns: Result, DbError> = db 493 | .execute_sql_with_return(&sql, &[&foreign_key.to_value()]) 494 | .map(|rows| { 495 | rows.iter() 496 | .map(|row| ColumnNameSimple { 497 | column: row.get("column").expect("a column"), 498 | }) 499 | .collect() 500 | }); 501 | match foreign_columns { 502 | Ok(foreign_columns) => { 503 | let mut column_names = vec![]; 504 | for simple in foreign_columns { 505 | let column_name = simple.to_columnname(); 506 | column_names.push(column_name); 507 | } 508 | Ok(column_names) 509 | } 510 | Err(e) => Err(e), 511 | } 512 | } 513 | 514 | #[cfg(test)] 515 | mod test { 516 | 517 | use crate::{pg::table_info::*, Pool, TableName}; 518 | 519 | #[test] 520 | fn all_schemas() { 521 | let db_url = "postgres://postgres:p0stgr3s@localhost:5432/sakila"; 522 | let mut pool = Pool::new(); 523 | let db = pool.db(db_url); 524 | assert!(db.is_ok()); 525 | let mut db = db.unwrap(); 526 | let schemas = get_schemas(&mut *db); 527 | info!("schemas: {:#?}", schemas); 528 | assert!(schemas.is_ok()); 529 | let schemas = schemas.unwrap(); 530 | 
        // sakila is expected to expose only the public schema
        assert_eq!(schemas, vec!["public"]);
    }

    #[test]
    fn all_tables() {
        let db_url = "postgres://postgres:p0stgr3s@localhost:5432/sakila";
        let mut pool = Pool::new();
        let db = pool.db(db_url);
        assert!(db.is_ok());
        let mut db = db.unwrap();
        let tables = get_all_tables(&mut *db);
        info!("tables: {:#?}", tables);
        assert!(tables.is_ok());
        // 30 relations total in the test database
        assert_eq!(30, tables.unwrap().len());
    }

    // single-column primary key on actor.actor_id
    #[test]
    fn table_actor() {
        let db_url = "postgres://postgres:p0stgr3s@localhost:5432/sakila";
        let mut pool = Pool::new();
        let db = pool.db(db_url);
        assert!(db.is_ok());
        let mut db = db.unwrap();
        let table = TableName::from("actor");
        let table = get_table(&mut *db, &table)
            .expect("must be ok")
            .expect("must have value");

        info!("table: {:#?}", table);

        assert_eq!(
            table.table_key,
            vec![TableKey::PrimaryKey(Key {
                name: Some("actor_pkey".to_string()),
                columns: vec![ColumnName {
                    name: "actor_id".to_string(),
                    table: None,
                    alias: None,
                }],
            })]
        );
    }

    // store.manager_staff_id -> staff.staff_id: the local and referred
    // column names differ, exercising the referred-columns lookup
    #[test]
    fn foreign_key_with_different_referred_column() {
        let db_url = "postgres://postgres:p0stgr3s@localhost:5432/sakila";
        let mut pool = Pool::new();
        let db = pool.db(db_url);
        assert!(db.is_ok());
        let mut db = db.unwrap();
        let table = TableName::from("store");
        let table = get_table(&mut *db, &table)
            .expect("must be ok")
            .expect("must have a value");

        assert_eq!(
            table.table_key,
            vec![
                TableKey::PrimaryKey(Key {
                    name: Some("store_pkey".into()),
                    columns: vec![ColumnName {
                        name: "store_id".into(),
                        table: None,
                        alias: None,
                    }],
                }),
                TableKey::ForeignKey(ForeignKey {
                    name: Some("store_address_id_fkey".into()),
                    columns: vec![ColumnName {
                        name: "address_id".into(),
                        table: None,
                        alias: None,
                    }],
                    foreign_table: TableName {
                        name: "address".into(),
                        schema: Some("public".into()),
                        alias: None,
                    },
                    referred_columns: vec![ColumnName {
                        name: "address_id".into(),
                        table: None,
                        alias: None,
                    }],
                }),
                TableKey::ForeignKey(ForeignKey {
                    name: Some("store_manager_staff_id_fkey".into()),
                    columns: vec![ColumnName {
                        name: "manager_staff_id".into(),
                        table: None,
                        alias: None,
                    }],
                    foreign_table: TableName {
                        name: "staff".into(),
                        schema: Some("public".into()),
                        alias: None,
                    },
                    referred_columns: vec![ColumnName {
                        name: "staff_id".into(),
                        table: None,
                        alias: None,
                    }],
                }),
            ]
        );
    }

    // composite primary key (actor_id, film_id) plus two single-column FKs
    #[test]
    fn table_film_actor() {
        let db_url = "postgres://postgres:p0stgr3s@localhost:5432/sakila";
        let mut pool = Pool::new();
        let db = pool.db(db_url);
        assert!(db.is_ok());
        let mut db = db.unwrap();
        let table = TableName::from("film_actor");
        let table = get_table(&mut *db, &table)
            .expect("must be ok")
            .expect("must have a value");
        info!("table: {:#?}", table);

        assert_eq!(
            table.table_key,
            vec![
                TableKey::PrimaryKey(Key {
                    name: Some("film_actor_pkey".into()),
                    columns: vec![
                        ColumnName {
                            name: "actor_id".into(),
                            table: None,
                            alias: None,
                        },
                        ColumnName {
                            name: "film_id".into(),
                            table: None,
                            alias: None,
                        },
                    ],
                }),
                TableKey::ForeignKey(ForeignKey {
                    name: Some("film_actor_actor_id_fkey".into()),
                    columns: vec![ColumnName {
                        name: "actor_id".into(),
                        table: None,
                        alias: None,
                    }],
                    foreign_table: TableName {
                        name: "actor".into(),
                        schema: Some("public".into()),
                        alias: None,
                    },
                    referred_columns: vec![ColumnName {
                        name: "actor_id".into(),
                        table: None,
                        alias: None,
                    }],
                }),
                TableKey::ForeignKey(ForeignKey {
                    name: Some("film_actor_film_id_fkey".into()),
                    columns: vec![ColumnName {
                        name: "film_id".into(),
                        table: None,
                        alias: None,
                    }],
                    foreign_table: TableName {
                        name: "film".into(),
                        schema: Some("public".into()),
                        alias: None,
                    },
                    referred_columns: vec![ColumnName {
                        name: "film_id".into(),
                        table: None,
                        alias: None,
                    }],
                }),
            ]
        );
    }

    // a foreign key spanning two columns (actor_id, film_id) referencing
    // film_actor's composite primary key
    #[test]
    fn composite_foreign_key() {
        let db_url = "postgres://postgres:p0stgr3s@localhost:5432/sakila";
        let mut pool = Pool::new();
        let db = pool.db(db_url);
        assert!(db.is_ok());
        let mut db = db.unwrap();
        let table = TableName::from("film_actor_awards");
        let table = get_table(&mut *db, &table)
            .expect("must be ok")
            .expect("must have a value");
        info!("table: {:#?}", table);
        assert_eq!(
            table.table_key,
            vec![
                TableKey::PrimaryKey(Key {
                    name: Some("film_actor_awards_pkey".into()),
                    columns: vec![
                        ColumnName {
                            name: "actor_id".into(),
                            table: None,
                            alias: None,
                        },
                        ColumnName {
                            name: "film_id".into(),
                            table: None,
                            alias: None,
                        },
                        ColumnName {
                            name: "award".into(),
                            table: None,
                            alias: None,
                        },
                    ],
                }),
                TableKey::ForeignKey(ForeignKey {
                    name: Some("film_actor_awards_actor_id_film_id_fkey".into()),
                    columns: vec![
                        ColumnName {
                            name: "actor_id".into(),
                            table: None,
                            alias: None,
                        },
                        ColumnName {
                            name: "film_id".into(),
                            table: None,
                            alias: None,
                        },
                    ],
                    foreign_table: TableName {
                        name: "film_actor".into(),
                        schema: Some("public".into()),
                        alias: None,
                    },
                    referred_columns: vec![
                        ColumnName {
                            name: "actor_id".into(),
                            table: None,
                            alias: None,
                        },
                        ColumnName {
                            name: "film_id".into(),
                            table: None,
                            alias: None,
                        },
                    ],
                }),
            ]
        );
    }

    // per-schema grouping: 23 plain tables and 7 views under public
    #[test]
    fn organized_content() {
        let db_url = "postgres://postgres:p0stgr3s@localhost:5432/sakila";
        let mut pool = Pool::new();
        let db = pool.db(db_url);
        assert!(db.is_ok());
        let mut db = db.unwrap();
        let organized = get_organized_tables(&mut *db);
        //info!("organized: {:#?}", organized);
        assert!(organized.is_ok());
        let organized = organized.unwrap();
        assert_eq!(organized.len(), 1);
        assert_eq!(organized[0].schema, "public");
        assert_eq!(organized[0].tablenames.len(), 23);
        assert_eq!(organized[0].views.len(), 7);
    }
}