├── .github ├── FUNDING.yml └── workflows │ ├── audit.yml │ └── ci.yml ├── .gitignore ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── clippy.toml ├── rust-toolchain ├── wundergraph ├── Cargo.toml ├── bench │ └── queries.rs ├── src │ ├── context.rs │ ├── diesel_ext.rs │ ├── error.rs │ ├── graphql_type.rs │ ├── helper │ │ ├── mod.rs │ │ ├── primary_keys.rs │ │ └── tuple.rs │ ├── juniper_ext │ │ ├── from_lookahead.rs │ │ ├── mod.rs │ │ └── nameable.rs │ ├── lib.rs │ ├── macros │ │ ├── mod.rs │ │ ├── mutation.rs │ │ └── query.rs │ ├── query_builder │ │ ├── mod.rs │ │ ├── mutations │ │ │ ├── delete.rs │ │ │ ├── insert │ │ │ │ ├── mod.rs │ │ │ │ ├── pg.rs │ │ │ │ └── sqlite.rs │ │ │ ├── mod.rs │ │ │ └── update.rs │ │ ├── selection │ │ │ ├── fields │ │ │ │ ├── associations.rs │ │ │ │ ├── field_list.rs │ │ │ │ ├── helper.rs │ │ │ │ └── mod.rs │ │ │ ├── filter │ │ │ │ ├── build_filter.rs │ │ │ │ ├── collector │ │ │ │ │ ├── and.rs │ │ │ │ │ ├── mod.rs │ │ │ │ │ └── or.rs │ │ │ │ ├── common_filter │ │ │ │ │ ├── eq.rs │ │ │ │ │ ├── eq_any.rs │ │ │ │ │ ├── mod.rs │ │ │ │ │ └── not_eq.rs │ │ │ │ ├── filter_helper.rs │ │ │ │ ├── filter_value.rs │ │ │ │ ├── inner_filter.rs │ │ │ │ ├── mod.rs │ │ │ │ ├── not.rs │ │ │ │ ├── nullable_filter │ │ │ │ │ ├── filter_option.rs │ │ │ │ │ ├── is_null.rs │ │ │ │ │ └── mod.rs │ │ │ │ ├── reference_filter.rs │ │ │ │ └── string_filter │ │ │ │ │ ├── like.rs │ │ │ │ │ └── mod.rs │ │ │ ├── mod.rs │ │ │ ├── offset.rs │ │ │ ├── order.rs │ │ │ ├── query_modifier.rs │ │ │ ├── query_resolver.rs │ │ │ └── select.rs │ │ └── types │ │ │ ├── field_value_resolver │ │ │ ├── direct_resolver.rs │ │ │ ├── has_one_resolver.rs │ │ │ └── mod.rs │ │ │ ├── has_many.rs │ │ │ ├── has_one.rs │ │ │ ├── mod.rs │ │ │ ├── placeholder.rs │ │ │ └── wundergraph_value.rs │ ├── scalar.rs │ └── third_party_integrations │ │ ├── chrono.rs │ │ ├── mod.rs │ │ └── uuid.rs └── tests │ ├── alias.rs │ ├── helper.rs │ ├── lib.rs │ ├── limit_offset.rs │ ├── mutations │ ├── create.rs │ ├── delete.rs │ ├── mod.rs │ └── update.rs │ ├── order.rs │ ├── query.rs │ ├── query_nested.rs │ ├── simple.rs │ └── type_checking.rs ├── wundergraph_bench ├── Cargo.toml ├── migrations │ ├── pg │ │ └── 2018-09-28-104319_setup │ │ │ ├── down.sql │ │ │ └── up.sql │ └── sqlite │ │ └── 2018-09-28-104542_setup │ │ ├── down.sql │ │ └── up.sql └── src │ ├── api.rs │ ├── bin │ └── wundergraph_bench.rs │ └── lib.rs ├── wundergraph_cli ├── Cargo.toml └── src │ ├── database.rs │ ├── infer_schema_internals │ ├── data_structures.rs │ ├── foreign_keys.rs │ ├── inference.rs │ ├── information_schema.rs │ ├── mod.rs │ ├── mysql.rs │ ├── pg.rs │ ├── sqlite.rs │ └── table_data.rs │ ├── main.rs │ └── print_schema │ ├── mod.rs │ ├── print_helper.rs │ ├── snapshots │ ├── wundergraph_cli__print_schema__tests__infer_schema@postgres.snap │ ├── wundergraph_cli__print_schema__tests__infer_schema@sqlite.snap │ ├── wundergraph_cli__print_schema__tests__round_trip_test__mutation.snap │ ├── wundergraph_cli__print_schema__tests__round_trip_test__query_1.snap │ └── wundergraph_cli__print_schema__tests__round_trip_test__query_2.snap │ └── template_main.rs ├── wundergraph_derive ├── Cargo.toml └── src │ ├── belonging_to.rs │ ├── build_filter_helper.rs │ ├── diagnostic_shim.rs │ ├── field.rs │ ├── lib.rs │ ├── meta.rs │ ├── model.rs │ ├── resolved_at_shim.rs │ ├── utils.rs │ ├── wundergraph_entity.rs │ ├── wundergraph_filter.rs │ └── wundergraph_value.rs └── wundergraph_example ├── Cargo.toml ├── migrations ├── .gitkeep ├── pg │ 
└── 2018-01-24-131925_setup │ │ ├── down.sql │ │ └── up.sql └── sqlite │ └── 2018-01-24-131925_setup │ ├── down.sql │ └── up.sql └── src ├── bin └── wundergraph_example.rs ├── lib.rs └── mutations.rs /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [weiznich] 2 | -------------------------------------------------------------------------------- /.github/workflows/audit.yml: -------------------------------------------------------------------------------- 1 | name: Security audit 2 | on: 3 | push: 4 | paths: 5 | - '**/Cargo.toml' 6 | - '**/Cargo.lock' 7 | schedule: 8 | - cron: '0 0 */7 * *' 9 | jobs: 10 | security_audit: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v1 14 | - uses: actions-rs/audit-check@v1 15 | with: 16 | token: ${{ secrets.GITHUB_TOKEN }} 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | **/target/ 3 | **/*.rs.bk 4 | Cargo.lock 5 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | All user visible changes to this project will be documented in this file. 4 | This project adheres to [Semantic Versioning](http://semver.org/), as described 5 | for Rust libraries in [RFC #1105](https://github.com/rust-lang/rfcs/blob/master/text/1105-api-evolution.md) 6 | 7 | ## [0.1.2] - 2020-03-05 8 | 9 | * Add proper CI 10 | * Fix an issue with a missmatch between documentation and implemented featurs for the `wundergraph::mutation_object!` macro. It accepts now all documented options 11 | * Fix an issue with with missing filter options on nullable relations 12 | * Disable default features for diesel 13 | 14 | ## [0.1.1] - 2019-11-17 15 | 16 | * Small documentation fixes 17 | * Fix missing imports for third party types in code generated by `wundergraph_cli` (Thanks Matteo Bertini) 18 | 19 | ## 0.1.0 - 2019-11-07 20 | 21 | * Initial Release 22 | 23 | 24 | [0.1.1]: https://github.com/weiznich/wundergraph/compare/v0.1.0...v0.1.1 25 | [0.1.2]: https://github.com/weiznich/wundergraph/compare/v0.1.1...v0.1.2 26 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "wundergraph", 4 | "wundergraph_derive", 5 | "wundergraph_example", 6 | "wundergraph_cli", 7 | "wundergraph_bench", 8 | ] 9 | 10 | [patch.crates-io] 11 | #wundergraph_derive = { path = "wundergraph_derive"} 12 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015-2018 Sean Griffin 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or 
substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Wundergraph 2 | ========== 3 | 4 | Wundergraph provides a platform to easily expose your database through a GraphQL interface. 5 | 6 | [![Build Status](https://travis-ci.org/weiznich/wundergraph.svg?branch=master)](https://travis-ci.org/weiznich/wundergraph) 7 | 8 | 9 | ## Example 10 | 11 | For a full example application see the [example project](https://github.com/weiznich/wundergraph/blob/master/wundergraph_example/src/bin/wundergraph_example.rs) 12 | 13 | ```rust 14 | #[macro_use] extern crate diesel; 15 | use wundergraph::prelude::*; 16 | 17 | table! { 18 | heros { 19 | id -> Integer, 20 | name -> Text, 21 | hair_color -> Nullable, 22 | species -> Integer, 23 | } 24 | } 25 | 26 | table! { 27 | species { 28 | id -> Integer, 29 | name -> Text, 30 | } 31 | } 32 | 33 | #[derive(Clone, Debug, Identifiable, WundergraphEntity)] 34 | #[table_name = "heros"] 35 | pub struct Hero { 36 | id: i32, 37 | name: String, 38 | hair_color: Option, 39 | species: HasOne, 40 | } 41 | 42 | #[derive(Clone, Debug, Identifiable, WundergraphEntity)] 43 | #[table_name = "species"] 44 | pub struct Species { 45 | id: i32, 46 | name: String, 47 | heros: HasMany, 48 | } 49 | 50 | wundergraph::query_object!{ 51 | Query { 52 | Hero, 53 | Species, 54 | } 55 | } 56 | ``` 57 | 58 | ## Building 59 | 60 | Depending on your backend choice you need to install a native library. `libpq` is required for the postgresql feature, `libsqlite3` for the sqlite feature. 61 | 62 | ## License 63 | 64 | Licensed under either of these: 65 | 66 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or 67 | https://www.apache.org/licenses/LICENSE-2.0) 68 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or 69 | https://opensource.org/licenses/MIT) 70 | 71 | ### Contributing 72 | 73 | Unless you explicitly state otherwise, any contribution you intentionally submit 74 | for inclusion in the work, as defined in the Apache-2.0 license, shall be 75 | dual-licensed as above, without any additional terms or conditions. 
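 ## Mutations The query-only example above can be extended with mutations. The structs below follow the patterns documented in `wundergraph::query_builder::mutations`; the `insert`/`update`/`delete` options passed to `wundergraph::mutation_object!` are an assumption about the macro's syntax, so treat this as a sketch and see the linked example project for the exact invocation. ```rust use juniper::GraphQLInputObject;

// Insertable data without the generated `id` column, following the pattern
// documented in `wundergraph::query_builder::mutations`.
#[derive(Insertable, GraphQLInputObject, Clone, Debug)]
#[table_name = "heros"]
pub struct NewHero {
    name: String,
    hair_color: Option<String>,
    species: i32,
}

// Changeset keyed by the primary key; all other fields are optional so that
// only the provided values are updated.
#[derive(AsChangeset, GraphQLInputObject, Identifiable, Debug)]
#[table_name = "heros"]
pub struct HeroChangeset {
    id: i32,
    name: Option<String>,
    hair_color: Option<String>,
    species: Option<i32>,
}

// Assumed `mutation_object!` option names; consult the macro documentation or
// the example project for the options it actually accepts.
wundergraph::mutation_object! {
    Mutation {
        Hero(insert = NewHero, update = HeroChangeset, delete = true),
    }
}
``` Deleting an entity only needs its primary key, which is why no extra input struct is required for the `delete` option.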
76 | -------------------------------------------------------------------------------- /clippy.toml: -------------------------------------------------------------------------------- 1 | cognitive-complexity-threshold = 30 2 | doc-valid-idents = [ 3 | "MiB", "GiB", "TiB", "PiB", "EiB", 4 | "DirectX", "OpenGL", "TrueType", 5 | "GPLv2", "GPLv3", 6 | "GitHub", 7 | "IPv4", "IPv6", 8 | "JavaScript", "NaN", "OAuth", 9 | "SQLite", "PostgreSQL", "MySQL", 10 | "GraphQL" 11 | ] 12 | -------------------------------------------------------------------------------- /rust-toolchain: -------------------------------------------------------------------------------- 1 | 1.42.0 2 | -------------------------------------------------------------------------------- /wundergraph/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "wundergraph" 3 | version = "0.1.2" 4 | authors = ["Georg Semmler "] 5 | license = "MIT OR Apache-2.0" 6 | autotests = false 7 | edition = "2018" 8 | repository = "https://github.com/weiznich/wundergraph" 9 | readme = "../README.md" 10 | keywords = ["GraphQL", "ORM", "PostgreSQL", "SQLite"] 11 | categories = ["database", "web-programming"] 12 | description = "A GraphQL ORM build on top of diesel" 13 | 14 | [dependencies] 15 | serde = "1" 16 | diesel = { version = "1.4", features = ["r2d2"], default-features = false} 17 | juniper = "0.14" 18 | indexmap = "1" 19 | uuid_internal = { version = "0.7", optional = true, package = "uuid" } 20 | chrono_internal = { version = "0.4", optional = true, package = "chrono" } 21 | log = { version = "0.4", optional = true } 22 | paste = "0.1" 23 | thiserror = "1" 24 | 25 | [dependencies.wundergraph_derive] 26 | version = "0.1" 27 | path = "../wundergraph_derive" 28 | 29 | 30 | [dev-dependencies] 31 | wundergraph_example = { path = "../wundergraph_example", default-features = false } 32 | wundergraph_bench = { path = "../wundergraph_bench", default-features = false } 33 | diesel_migrations = "1.4.0" 34 | serde_json = "1" 35 | criterion = "0.3" 36 | lazy_static = "1" 37 | insta = "0.12" 38 | 39 | [features] 40 | default = [] 41 | debug = ["wundergraph_derive/debug", "log"] 42 | sqlite = ["diesel/sqlite", "wundergraph_derive/sqlite"] 43 | postgres = ["diesel/postgres", "wundergraph_derive/postgres"] 44 | extras = ["uuid", "chrono"] 45 | uuid = ["uuid_internal", "diesel/uuidv07"] 46 | chrono = ["chrono_internal", "diesel/chrono"] 47 | 48 | [[test]] 49 | name = "integration_tests" 50 | path = "tests/lib.rs" 51 | harness = true 52 | 53 | [[bench]] 54 | name = "queries" 55 | path = "bench/queries.rs" 56 | harness = false 57 | -------------------------------------------------------------------------------- /wundergraph/bench/queries.rs: -------------------------------------------------------------------------------- 1 | extern crate criterion; 2 | extern crate diesel; 3 | extern crate diesel_migrations; 4 | extern crate juniper; 5 | extern crate serde_json; 6 | extern crate wundergraph; 7 | extern crate wundergraph_bench; 8 | extern crate wundergraph_example; 9 | #[macro_use] 10 | extern crate lazy_static; 11 | 12 | #[cfg(feature = "postgres")] 13 | type DbConnection = diesel::pg::PgConnection; 14 | 15 | #[cfg(feature = "sqlite")] 16 | type DbConnection = diesel::sqlite::SqliteConnection; 17 | 18 | #[cfg(not(any(feature = "postgres", feature = "sqlite")))] 19 | compile_error!("At least one feature of \"sqlite\" or \"postgres\" needs to be enabled"); 20 | 21 | #[path = "../tests/helper.rs"] 22 | mod 
helper; 23 | 24 | use criterion::*; 25 | use diesel::r2d2::{ConnectionManager, PooledConnection}; 26 | use wundergraph_bench::Schema; 27 | 28 | const QUERIES: &[&str] = &[ 29 | r#"query albums_tracks_genre_all { 30 | Albums { 31 | album_id 32 | title 33 | tracks { 34 | track_id 35 | name 36 | genre_id { 37 | name 38 | } 39 | } 40 | } 41 | }"#, 42 | r#"query albums_tracks_genre_some { 43 | Albums(filter: {artist_id: {artist_id: {eq: 127}}}) { 44 | album_id 45 | title 46 | tracks { 47 | track_id 48 | name 49 | genre_id { 50 | name 51 | } 52 | } 53 | } 54 | }"#, 55 | r#"query tracks_media_all { 56 | Tracks { 57 | track_id 58 | name 59 | media_type_id { 60 | name 61 | } 62 | } 63 | }"#, 64 | r#"query tracks_media_some { 65 | Tracks (filter: {composer: {eq: "Kurt Cobain"}}){ 66 | track_id 67 | name 68 | album_id { 69 | album_id 70 | title 71 | } 72 | media_type_id { 73 | name 74 | } 75 | } 76 | }"#, 77 | r#"query artists_collaboration { 78 | Artists(filter: {albums: {tracks: {composer: {eq: "Ludwig van Beethoven"}}}}) 79 | { 80 | artist_id 81 | name 82 | } 83 | }"#, 84 | r#"query artistByArtistId { 85 | Artists(filter: {artist_id: {eq:3}}) { 86 | artist_id 87 | name 88 | } 89 | }"#, 90 | ]; 91 | 92 | fn query( 93 | query: &str, 94 | schema: &Schema, 95 | ctx: &PooledConnection>, 96 | ) { 97 | let res = helper::execute_query(&schema, &ctx, query); 98 | assert!(res.is_ok()); 99 | } 100 | 101 | fn bench(c: &mut Criterion) { 102 | let (schema, pool) = helper::get_bench_schema(); 103 | let ctx = pool.get().unwrap(); 104 | 105 | c.bench_function_over_inputs( 106 | "query", 107 | move |b, &&query_string| { 108 | b.iter(|| query(query_string, &schema, &ctx)); 109 | }, 110 | QUERIES, 111 | ); 112 | } 113 | 114 | criterion_group!(benches, bench); 115 | criterion_main!(benches); 116 | -------------------------------------------------------------------------------- /wundergraph/src/context.rs: -------------------------------------------------------------------------------- 1 | use diesel::Connection; 2 | 3 | /// A trait for types that could be used as context types for wundergraph 4 | pub trait WundergraphContext { 5 | /// The underlying connection type 6 | type Connection: Connection + 'static; 7 | 8 | /// Get a connection from the context 9 | fn get_connection(&self) -> &Self::Connection; 10 | } 11 | 12 | impl WundergraphContext for Conn 13 | where 14 | Conn: Connection + 'static, 15 | { 16 | type Connection = Self; 17 | 18 | fn get_connection(&self) -> &Self { 19 | self 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /wundergraph/src/diesel_ext.rs: -------------------------------------------------------------------------------- 1 | //! A module containing extension traits for various diesel types 2 | 3 | use diesel::backend::Backend; 4 | use diesel::expression::{AppearsOnTable, Expression, NonAggregate, SelectableExpression}; 5 | use diesel::query_builder::{AstPass, QueryFragment, QueryId}; 6 | use diesel::result::QueryResult; 7 | use diesel::sql_types::IntoNullable; 8 | 9 | /// A helper trait used when boxing filters 10 | /// 11 | /// In Rust you cannot create a trait object with more than one trait. 12 | /// This type has all of the additional traits you would want when using 13 | /// `Box` as a single trait object. This type is comparable to 14 | /// diesels `BoxableExpression`, but allows to use non select able expressions, 15 | /// which is mainly useful for constructing filters. 
16 | /// 17 | /// This is typically used as the return type of a function or as associated 18 | /// types in traits. 19 | pub trait BoxableFilter 20 | where 21 | DB: Backend, 22 | Self: Expression, 23 | Self: AppearsOnTable, 24 | Self: NonAggregate, 25 | Self: QueryFragment, 26 | { 27 | } 28 | 29 | impl BoxableFilter for T 30 | where 31 | DB: Backend, 32 | T: Expression, 33 | T: AppearsOnTable, 34 | T: NonAggregate, 35 | T: QueryFragment, 36 | { 37 | } 38 | 39 | /// A diesel helper type that indicates if null or some expression selected 40 | #[derive(Debug)] 41 | pub enum MaybeNull { 42 | /// Select the expression 43 | Expr(T), 44 | /// Select a null value 45 | Null, 46 | } 47 | 48 | impl Expression for MaybeNull 49 | where 50 | T: Expression, 51 | T::SqlType: IntoNullable, 52 | { 53 | type SqlType = ::Nullable; 54 | } 55 | 56 | impl QueryFragment for MaybeNull 57 | where 58 | DB: Backend, 59 | T: QueryFragment, 60 | { 61 | fn walk_ast(&self, mut pass: AstPass<'_, DB>) -> QueryResult<()> { 62 | match self { 63 | MaybeNull::Expr(e) => e.walk_ast(pass)?, 64 | MaybeNull::Null => pass.push_sql(" NULL "), 65 | } 66 | Ok(()) 67 | } 68 | } 69 | 70 | impl QueryId for MaybeNull 71 | where 72 | ST: QueryId, 73 | { 74 | type QueryId = (); 75 | const HAS_STATIC_QUERY_ID: bool = false; 76 | } 77 | 78 | impl NonAggregate for MaybeNull {} 79 | 80 | impl AppearsOnTable for MaybeNull where Self: Expression {} 81 | 82 | impl SelectableExpression for MaybeNull where Self: Expression {} 83 | -------------------------------------------------------------------------------- /wundergraph/src/error.rs: -------------------------------------------------------------------------------- 1 | //! This module contains all error handling related functionality in wundergraph 2 | 3 | use crate::scalar::WundergraphScalarValue; 4 | use thiserror::Error; 5 | 6 | /// The main error type of wundergraph 7 | #[derive(Debug, Error)] 8 | pub enum WundergraphError { 9 | /// Indicates that it was not possible to build a filter from the given 10 | /// graphql arguments 11 | #[error("Could not build filter from arguments")] 12 | CouldNotBuildFilterArgument, 13 | /// Indicates that a unknown database field name was passed into 14 | /// wundergraph 15 | #[error("Requested unkown field {name}")] 16 | UnknownDatabaseField { 17 | ///The name of the unknown database field 18 | name: String, 19 | }, 20 | /// Indicates that a primary key filter could not be build from the 21 | /// given arguments 22 | #[error("Could not build primary key filter from arguments")] 23 | NoPrimaryKeyArgumentFound, 24 | /// Indicates that building a graphql return value failed 25 | #[error("Failed to build a return value")] 26 | JuniperError { 27 | /// Error returned from juniper 28 | inner: juniper::FieldError, 29 | }, 30 | /// Indicates that executing a database query failed 31 | #[error("Failed to execute query")] 32 | DieselError { 33 | /// Error returned from diesel 34 | #[from] 35 | inner: diesel::result::Error, 36 | }, 37 | } 38 | 39 | /// Commonly used result type 40 | pub type Result = std::result::Result; 41 | -------------------------------------------------------------------------------- /wundergraph/src/graphql_type.rs: -------------------------------------------------------------------------------- 1 | use crate::query_builder::selection::offset::ApplyOffset; 2 | use crate::query_builder::selection::LoadingHandler; 3 | use crate::scalar::WundergraphScalarValue; 4 | use diesel::backend::Backend; 5 | use diesel::query_builder::QueryFragment; 6 | use 
diesel::QuerySource; 7 | use juniper::{meta, GraphQLType, Registry}; 8 | use std::marker::PhantomData; 9 | 10 | /// A helper type to automatically provide `juniper::GraphQLObject` implementation 11 | /// for types that also implement `LoadingHandler` 12 | #[derive(Debug)] 13 | pub struct GraphqlWrapper(T, PhantomData<(DB, Ctx)>); 14 | 15 | impl GraphQLType for GraphqlWrapper 16 | where 17 | DB: Backend + ApplyOffset + 'static, 18 | T::Table: 'static, 19 | ::FromClause: QueryFragment, 20 | T: LoadingHandler, 21 | T::FieldList: WundergraphGraphqlHelper, 22 | DB::QueryBuilder: Default, 23 | { 24 | type Context = (); 25 | type TypeInfo = (); 26 | 27 | fn name(_info: &Self::TypeInfo) -> Option<&str> { 28 | Some(T::TYPE_NAME) 29 | } 30 | 31 | fn meta<'r>( 32 | _info: &Self::TypeInfo, 33 | registry: &mut Registry<'r, WundergraphScalarValue>, 34 | ) -> meta::MetaType<'r, WundergraphScalarValue> 35 | where 36 | WundergraphScalarValue: 'r, 37 | { 38 | >::object_meta::( 39 | T::FIELD_NAMES, 40 | registry, 41 | ) 42 | } 43 | } 44 | 45 | #[doc(hidden)] 46 | pub trait WundergraphGraphqlMapper { 47 | type GraphQLType: GraphQLType; 48 | 49 | fn register_arguments<'r>( 50 | _registry: &mut Registry<'r, WundergraphScalarValue>, 51 | field: meta::Field<'r, WundergraphScalarValue>, 52 | ) -> meta::Field<'r, WundergraphScalarValue> { 53 | field 54 | } 55 | } 56 | 57 | impl WundergraphGraphqlMapper for T 58 | where 59 | T: GraphQLType, 60 | { 61 | type GraphQLType = Self; 62 | } 63 | 64 | #[doc(hidden)] 65 | pub trait WundergraphGraphqlHelper { 66 | fn object_meta<'r, T>( 67 | names: &[&str], 68 | registry: &mut Registry<'r, WundergraphScalarValue>, 69 | ) -> meta::MetaType<'r, WundergraphScalarValue> 70 | where 71 | T: GraphQLType; 72 | } 73 | 74 | macro_rules! wundergraph_graphql_helper_impl { 75 | ($( 76 | $Tuple:tt { 77 | $(($idx:tt) -> $T:ident, $ST: ident, $TT: ident,) + 78 | } 79 | )+) => { 80 | $( 81 | impl<$($T,)* Loading, Back, Ctx> WundergraphGraphqlHelper for ($($T,)*) 82 | where $($T: WundergraphGraphqlMapper,)* 83 | Back: Backend + ApplyOffset + 'static, 84 | Loading::Table: 'static, 85 | ::FromClause: QueryFragment, 86 | Loading: LoadingHandler, 87 | Back::QueryBuilder: Default, 88 | { 89 | fn object_meta<'r, Type>( 90 | names: &[&str], 91 | registry: &mut Registry<'r, WundergraphScalarValue>, 92 | ) -> meta::MetaType<'r, WundergraphScalarValue> 93 | where Type: GraphQLType 94 | { 95 | let fields = [ 96 | $({ 97 | let mut field = registry.field::<<$T as WundergraphGraphqlMapper>::GraphQLType>(names[$idx], &()); 98 | field = <$T as WundergraphGraphqlMapper>::register_arguments(registry, field); 99 | if let Some(doc) = Loading::field_description($idx) { 100 | field = field.description(doc); 101 | } 102 | if let Some(deprecated) = Loading::field_deprecation($idx) { 103 | field = field.deprecated(deprecated); 104 | } 105 | field 106 | },)* 107 | ]; 108 | let mut ty = registry.build_object_type::( 109 | &(), 110 | &fields, 111 | ); 112 | if let Some(doc) = Loading::TYPE_DESCRIPTION { 113 | ty = ty.description(doc); 114 | } 115 | meta::MetaType::Object(ty) 116 | } 117 | } 118 | )* 119 | }; 120 | } 121 | 122 | __diesel_for_each_tuple!(wundergraph_graphql_helper_impl); 123 | -------------------------------------------------------------------------------- /wundergraph/src/helper/mod.rs: -------------------------------------------------------------------------------- 1 | //! A module containing various helper traits and types mostly useful 2 | //! 
to work with tuples at compile time 3 | 4 | pub(crate) mod primary_keys; 5 | pub(crate) mod tuple; 6 | 7 | #[doc(inline)] 8 | pub use self::primary_keys::NamedTable; 9 | #[doc(inline)] 10 | pub use self::primary_keys::PrimaryKeyInputObject; 11 | #[doc(inline)] 12 | pub use self::primary_keys::UnRef; 13 | #[doc(inline)] 14 | pub use self::primary_keys::UnRefClone; 15 | #[doc(hidden)] 16 | pub use self::primary_keys::{PrimaryKeyArgument, PrimaryKeyInfo}; 17 | 18 | #[doc(inline)] 19 | pub use self::tuple::AppendToTuple; 20 | #[doc(inline)] 21 | pub use self::tuple::ConcatTuples; 22 | #[doc(inline)] 23 | pub use self::tuple::IsPrimaryKeyIndex; 24 | #[doc(inline)] 25 | pub use self::tuple::TupleIndex; 26 | #[doc(inline)] 27 | pub use self::tuple::*; 28 | -------------------------------------------------------------------------------- /wundergraph/src/juniper_ext/from_lookahead.rs: -------------------------------------------------------------------------------- 1 | use crate::scalar::WundergraphScalarValue; 2 | use juniper::{LookAheadValue, ID}; 3 | 4 | /// A helper trait marking how to convert a `LookAheadValue` into a specific type 5 | pub trait FromLookAheadValue: Sized { 6 | /// Try to convert a `LookAheadValue` into a specific type 7 | /// 8 | /// For a successful conversion `Some(value)` is returned, otherwise `None` 9 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option; 10 | } 11 | 12 | impl FromLookAheadValue for i16 { 13 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 14 | if let LookAheadValue::Scalar(WundergraphScalarValue::SmallInt(ref i)) = *v { 15 | Some(*i) 16 | } else { 17 | None 18 | } 19 | } 20 | } 21 | 22 | impl FromLookAheadValue for i32 { 23 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 24 | match *v { 25 | LookAheadValue::Scalar(WundergraphScalarValue::SmallInt(ref i)) => Some(Self::from(*i)), 26 | LookAheadValue::Scalar(WundergraphScalarValue::Int(ref i)) => Some(*i), 27 | _ => None, 28 | } 29 | } 30 | } 31 | 32 | impl FromLookAheadValue for i64 { 33 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 34 | match *v { 35 | LookAheadValue::Scalar(WundergraphScalarValue::SmallInt(ref i)) => Some(Self::from(*i)), 36 | LookAheadValue::Scalar(WundergraphScalarValue::Int(ref i)) => Some(Self::from(*i)), 37 | LookAheadValue::Scalar(WundergraphScalarValue::BigInt(ref i)) => Some(*i), 38 | _ => None, 39 | } 40 | } 41 | } 42 | 43 | impl FromLookAheadValue for bool { 44 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 45 | if let LookAheadValue::Scalar(WundergraphScalarValue::Boolean(ref b)) = *v { 46 | Some(*b) 47 | } else { 48 | None 49 | } 50 | } 51 | } 52 | 53 | impl FromLookAheadValue for String { 54 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 55 | if let LookAheadValue::Scalar(WundergraphScalarValue::String(ref s)) = *v { 56 | Some(s.to_owned()) 57 | } else { 58 | None 59 | } 60 | } 61 | } 62 | 63 | impl FromLookAheadValue for f32 { 64 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 65 | if let LookAheadValue::Scalar(WundergraphScalarValue::Float(ref f)) = *v { 66 | Some(*f) 67 | } else { 68 | None 69 | } 70 | } 71 | } 72 | 73 | impl FromLookAheadValue for f64 { 74 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 75 | match *v { 76 | LookAheadValue::Scalar(WundergraphScalarValue::Float(ref i)) => Some(Self::from(*i)), 77 | 
LookAheadValue::Scalar(WundergraphScalarValue::Double(ref i)) => Some(*i), 78 | _ => None, 79 | } 80 | } 81 | } 82 | 83 | impl FromLookAheadValue for ID { 84 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 85 | match *v { 86 | LookAheadValue::Scalar(WundergraphScalarValue::Int(ref i)) => { 87 | Some(Self::from(i.to_string())) 88 | } 89 | LookAheadValue::Scalar(WundergraphScalarValue::String(ref s)) => { 90 | Some(Self::from(s.to_string())) 91 | } 92 | _ => None, 93 | } 94 | } 95 | } 96 | 97 | impl FromLookAheadValue for Vec 98 | where 99 | T: FromLookAheadValue, 100 | { 101 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 102 | if let LookAheadValue::List(ref l) = *v { 103 | l.iter().map(T::from_look_ahead).collect() 104 | } else { 105 | None 106 | } 107 | } 108 | } 109 | 110 | impl FromLookAheadValue for Box 111 | where 112 | T: FromLookAheadValue, 113 | { 114 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 115 | T::from_look_ahead(v).map(Box::new) 116 | } 117 | } 118 | 119 | impl FromLookAheadValue for Option 120 | where 121 | T: FromLookAheadValue, 122 | { 123 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 124 | Some(T::from_look_ahead(v)) 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /wundergraph/src/juniper_ext/mod.rs: -------------------------------------------------------------------------------- 1 | //! A module containing juniper specific extension traits 2 | 3 | mod from_lookahead; 4 | mod nameable; 5 | 6 | pub use self::from_lookahead::FromLookAheadValue; 7 | pub use self::nameable::{NameBuilder, Nameable}; 8 | -------------------------------------------------------------------------------- /wundergraph/src/juniper_ext/nameable.rs: -------------------------------------------------------------------------------- 1 | use std::marker::PhantomData; 2 | 3 | use juniper::ID; 4 | 5 | /// A helper type allowing to construct dynamical named types 6 | /// using the juniper api 7 | #[derive(Debug)] 8 | pub struct NameBuilder(String, PhantomData); 9 | 10 | impl Default for NameBuilder 11 | where 12 | T: Nameable, 13 | { 14 | fn default() -> Self { 15 | Self(T::name(), PhantomData) 16 | } 17 | } 18 | 19 | impl NameBuilder { 20 | /// Create a new `NameBuilder` with a given naem. 21 | pub fn name(&self) -> &str { 22 | &self.0 23 | } 24 | } 25 | 26 | /// Mark a given type as nameable in a graphql context 27 | pub trait Nameable { 28 | /// The name of the given type 29 | /// 30 | /// The returned name must be a valid graphq name. 31 | /// 1. The name must be unique for this type 32 | /// 2. The name should only contain alphanumerical 33 | /// characters and `_` 34 | // TODO: check this rules 35 | // TODO: Try to return `Cow`? 
36 | fn name() -> String; 37 | } 38 | 39 | impl Nameable for String { 40 | fn name() -> String { 41 | Self::from("String") 42 | } 43 | } 44 | 45 | impl Nameable for i16 { 46 | fn name() -> String { 47 | String::from("SmallInt") 48 | } 49 | } 50 | 51 | impl Nameable for i32 { 52 | fn name() -> String { 53 | String::from("Int") 54 | } 55 | } 56 | 57 | impl Nameable for i64 { 58 | fn name() -> String { 59 | String::from("BigInt") 60 | } 61 | } 62 | 63 | impl Nameable for f32 { 64 | fn name() -> String { 65 | String::from("Float") 66 | } 67 | } 68 | 69 | impl Nameable for f64 { 70 | fn name() -> String { 71 | String::from("Double") 72 | } 73 | } 74 | 75 | impl Nameable for bool { 76 | fn name() -> String { 77 | String::from("bool") 78 | } 79 | } 80 | 81 | impl Nameable for ID { 82 | fn name() -> String { 83 | String::from("ID") 84 | } 85 | } 86 | 87 | impl Nameable for Option 88 | where 89 | T: Nameable, 90 | { 91 | fn name() -> String { 92 | format!("Nullable_{}_", T::name()) 93 | } 94 | } 95 | 96 | impl Nameable for Vec 97 | where 98 | T: Nameable, 99 | { 100 | fn name() -> String { 101 | format!("Vec_{}_", T::name()) 102 | } 103 | } 104 | 105 | impl Nameable for () { 106 | fn name() -> String { 107 | String::new() 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /wundergraph/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Wundergraph provides a platform to easily expose your database through 2 | //! a GraphQL interface. 3 | //! 4 | //! ## Short example 5 | //! 6 | //! ```rust 7 | //! # #[macro_use] extern crate diesel; 8 | //! # 9 | //! use wundergraph::prelude::*; 10 | //! 11 | //! table! { 12 | //! heros { 13 | //! id -> Integer, 14 | //! name -> Text, 15 | //! hair_color -> Nullable, 16 | //! species -> Integer, 17 | //! } 18 | //! } 19 | //! 20 | //! table! { 21 | //! species { 22 | //! id -> Integer, 23 | //! name -> Text, 24 | //! } 25 | //! } 26 | //! 27 | //! #[derive(Clone, Debug, Identifiable, WundergraphEntity)] 28 | //! #[table_name = "heros"] 29 | //! pub struct Hero { 30 | //! id: i32, 31 | //! name: String, 32 | //! hair_color: Option, 33 | //! species: HasOne, 34 | //! } 35 | //! 36 | //! #[derive(Clone, Debug, Identifiable, WundergraphEntity)] 37 | //! #[table_name = "species"] 38 | //! pub struct Species { 39 | //! id: i32, 40 | //! name: String, 41 | //! heros: HasMany, 42 | //! } 43 | //! 44 | //! wundergraph::query_object!{ 45 | //! Query { 46 | //! Hero, 47 | //! Species, 48 | //! } 49 | //! } 50 | //! 51 | //! # fn main() {} 52 | //! ``` 53 | //! 54 | //! ## Where to find things 55 | //! 56 | //! Everything required for basic usage of wundergraph is exposed through 57 | //! [`wundergraph::prelude`](prelude/index.html). 58 | //! [`wundergraph::query_builder::selection`](query_builder/selection/index.html) 59 | //! contains functionality to manual extend or implement a query entity, 60 | //! [`wundergraph::query_builder::mutations`](query_builder/mutations/index.html) 61 | //! contains similar functionality for mutations. 62 | //! [`wundergraph::scalar`](scalar/index.html) provides the implementation of 63 | //! the internal used juniper scalar value type. [`wundergraph::error`](error/index.html) 64 | //! contains the definition of the internal error type. 65 | //! [`wundergraph::diesel_ext`](diesel_ext/index.html) and 66 | //! [`wundergraph::juniper_ext`](juniper_ext/index.html) provide 67 | //! extension traits and types for the corresponding crates. 68 | //! 
[`wundergraph::helper`](helper/index.html) contains wundergraph 69 | //! specific helper types. 70 | //! 71 | //! 72 | 73 | #![deny(missing_debug_implementations, missing_copy_implementations)] 74 | #![warn( 75 | missing_docs, 76 | clippy::option_unwrap_used, 77 | clippy::result_unwrap_used, 78 | clippy::print_stdout, 79 | clippy::wrong_pub_self_convention, 80 | clippy::mut_mut, 81 | clippy::non_ascii_literal, 82 | clippy::similar_names, 83 | clippy::unicode_not_nfc, 84 | clippy::enum_glob_use, 85 | clippy::if_not_else, 86 | clippy::items_after_statements, 87 | clippy::used_underscore_binding, 88 | clippy::cargo_common_metadata, 89 | clippy::dbg_macro, 90 | clippy::doc_markdown, 91 | clippy::filter_map, 92 | clippy::map_flatten, 93 | clippy::match_same_arms, 94 | clippy::needless_borrow, 95 | clippy::needless_pass_by_value, 96 | clippy::option_map_unwrap_or, 97 | clippy::option_map_unwrap_or_else, 98 | clippy::redundant_clone, 99 | clippy::result_map_unwrap_or_else, 100 | clippy::unnecessary_unwrap, 101 | clippy::unseparated_literal_suffix, 102 | clippy::wildcard_dependencies 103 | )] 104 | #![allow(clippy::type_complexity)] 105 | 106 | #[doc(hidden)] 107 | #[macro_use] 108 | pub extern crate diesel; 109 | #[doc(hidden)] 110 | pub extern crate indexmap; 111 | #[doc(hidden)] 112 | pub extern crate juniper; 113 | #[doc(hidden)] 114 | #[cfg(feature = "debug")] 115 | pub extern crate log; 116 | #[doc(hidden)] 117 | pub extern crate paste; 118 | 119 | pub use wundergraph_derive::WundergraphEntity; 120 | 121 | pub mod diesel_ext; 122 | pub mod error; 123 | pub mod helper; 124 | pub mod juniper_ext; 125 | pub mod scalar; 126 | #[macro_use] 127 | mod macros; 128 | pub(crate) mod context; 129 | #[doc(hidden)] 130 | pub mod graphql_type; 131 | pub mod query_builder; 132 | 133 | mod third_party_integrations; 134 | 135 | pub mod prelude { 136 | //! Re-exports important traits and types. Meant to be glob imported 137 | //! when using wundergraph. 138 | 139 | #[doc(inline)] 140 | pub use super::context::WundergraphContext; 141 | 142 | #[doc(inline)] 143 | pub use super::query_builder::types::{HasMany, HasOne}; 144 | 145 | #[doc(inline)] 146 | pub use crate::query_builder::selection::{BoxedQuery, QueryModifier}; 147 | 148 | #[doc(inline)] 149 | pub use super::WundergraphEntity; 150 | 151 | #[doc(inline)] 152 | pub use super::query_object; 153 | 154 | #[doc(inline)] 155 | pub use super::mutation_object; 156 | } 157 | 158 | #[doc(hidden)] 159 | pub use self::prelude::*; 160 | 161 | #[macro_export] 162 | #[doc(hidden)] 163 | /// Used by `wundergraph_derives`, which can't access `$crate` 164 | macro_rules! __use_everything { 165 | () => { 166 | pub use $crate::*; 167 | }; 168 | } 169 | -------------------------------------------------------------------------------- /wundergraph/src/macros/mod.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | mod query; 3 | #[macro_use] 4 | mod mutation; 5 | 6 | #[doc(hidden)] 7 | #[macro_export] 8 | macro_rules! __wundergraph_debug_log_wrapper { 9 | ($($t:tt)*) => {$crate::log::debug!($($t)*)} 10 | } 11 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains functionality used by wundergraph 2 | //! to convert a GraphQL request as sql query. 
3 | 4 | pub mod mutations; 5 | pub mod selection; 6 | pub mod types; 7 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/mutations/delete.rs: -------------------------------------------------------------------------------- 1 | use crate::context::WundergraphContext; 2 | use crate::query_builder::selection::fields::WundergraphFieldList; 3 | use crate::query_builder::selection::offset::ApplyOffset; 4 | use crate::query_builder::selection::order::BuildOrder; 5 | use crate::query_builder::selection::select::BuildSelect; 6 | use crate::query_builder::selection::{LoadingHandler, SqlTypeOfPlaceholder}; 7 | use crate::scalar::WundergraphScalarValue; 8 | use diesel::associations::HasTable; 9 | use diesel::backend::Backend; 10 | use diesel::dsl::Filter; 11 | use diesel::query_builder::{IntoUpdateTarget, QueryFragment, QueryId}; 12 | use diesel::query_dsl::methods::FilterDsl; 13 | use diesel::Identifiable; 14 | use diesel::{Connection, EqAll, QuerySource, RunQueryDsl, Table}; 15 | use juniper::{ 16 | Arguments, ExecutionResult, Executor, FieldError, FromInputValue, GraphQLObject, Value, 17 | }; 18 | 19 | /// A struct representing the number of deleted entities 20 | #[derive(Debug, GraphQLObject, Clone, Copy)] 21 | #[graphql(scalar = WundergraphScalarValue)] 22 | pub struct DeletedCount { 23 | /// Number of deleted entities 24 | pub count: i64, 25 | } 26 | 27 | #[doc(hidden)] 28 | pub fn handle_delete( 29 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 30 | arguments: &Arguments<'_, WundergraphScalarValue>, 31 | field_name: &'static str, 32 | ) -> ExecutionResult 33 | where 34 | R: LoadingHandler, 35 | R::Table: HandleDelete + 'static, 36 | DB: Backend + ApplyOffset + 'static, 37 | DB::QueryBuilder: Default, 38 | R::Columns: BuildOrder 39 | + BuildSelect< 40 | R::Table, 41 | DB, 42 | SqlTypeOfPlaceholder, 43 | >, 44 | ::FromClause: QueryFragment, 45 | D: FromInputValue, 46 | { 47 | if let Some(n) = arguments.get::(field_name) { 48 | >::handle_delete(executor, &n) 49 | } else { 50 | let msg = format!("Missing argument {:?}", field_name); 51 | Err(FieldError::new(&msg, Value::Null)) 52 | } 53 | } 54 | 55 | /// A trait to handle delete mutations for database entities 56 | /// 57 | /// Type parameters: 58 | /// * `Self`: database table type from diesel 59 | /// * `L`: Struct implementing `LoadingHandler` 60 | /// * `K`: Input type used determine which entities should be deleted. 61 | /// Normally something representing the primary key of the table 62 | /// * `DB`: Backend type from diesel, so one of `Pg` or `Sqlite` 63 | /// * `Ctx`: The used wundergraph context type 64 | pub trait HandleDelete { 65 | /// Actual function called to delete a database entity 66 | fn handle_delete( 67 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 68 | to_delete: &K, 69 | ) -> ExecutionResult; 70 | } 71 | 72 | // We use the 'static static lifetime here because otherwise rustc will 73 | // tell us that it could not find a applying lifetime (caused by broken projection 74 | // on higher ranked lifetime bounds) 75 | impl HandleDelete for T 76 | where 77 | T: Table + HasTable + QueryId + 'static, 78 | DB: Backend + ApplyOffset + 'static, 79 | DB::QueryBuilder: Default, 80 | T::FromClause: QueryFragment, 81 | L: LoadingHandler, 82 | L::Columns: BuildOrder 83 | + BuildSelect>, 84 | Ctx: WundergraphContext, 85 | Ctx::Connection: Connection, 86 | L::FieldList: WundergraphFieldList, 87 | K: 'static, 88 | &'static K: Identifiable
, 89 | T::PrimaryKey: EqAll<<&'static K as Identifiable>::Id> + Default, 90 | T::Query: FilterDsl<<T::PrimaryKey as EqAll<<&'static K as Identifiable>::Id>>::Output>, 91 | Filter<T::Query, <T::PrimaryKey as EqAll<<&'static K as Identifiable>::Id>>::Output>: IntoUpdateTarget
, 92 | ::Id>>::Output> as IntoUpdateTarget>::WhereClause: QueryFragment 93 | + QueryId, 94 | { 95 | fn handle_delete( 96 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 97 | to_delete: &K, 98 | ) -> ExecutionResult { 99 | let ctx = executor.context(); 100 | let conn = ctx.get_connection(); 101 | conn.transaction(|| -> ExecutionResult { 102 | // this is safe becuse we do not leak to_delete out of this function 103 | let static_to_delete: &'static K = unsafe { &*(to_delete as *const K) }; 104 | let filter = T::PrimaryKey::default().eq_all(static_to_delete.id()); 105 | let d = ::diesel::delete(FilterDsl::filter(Self::table(), filter)); 106 | #[cfg(feature = "debug")] 107 | { 108 | log::debug!("{}", ::diesel::debug_query(&d)); 109 | } 110 | 111 | executor.resolve_with_ctx( 112 | &(), 113 | &DeletedCount { 114 | count: d.execute(conn)? as _, 115 | }, 116 | ) 117 | }) 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/mutations/insert/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::query_builder::selection::offset::ApplyOffset; 2 | use crate::query_builder::selection::order::BuildOrder; 3 | use crate::query_builder::selection::select::BuildSelect; 4 | use crate::query_builder::selection::LoadingHandler; 5 | use crate::query_builder::selection::SqlTypeOfPlaceholder; 6 | use crate::scalar::WundergraphScalarValue; 7 | use diesel::backend::Backend; 8 | use diesel::query_builder::QueryFragment; 9 | use diesel::QuerySource; 10 | use juniper::{Arguments, ExecutionResult, Executor, FieldError, FromInputValue, Selection, Value}; 11 | 12 | #[cfg(feature = "postgres")] 13 | mod pg; 14 | 15 | #[cfg(feature = "sqlite")] 16 | mod sqlite; 17 | 18 | #[doc(hidden)] 19 | pub fn handle_insert( 20 | selection: Option<&'_ [Selection<'_, WundergraphScalarValue>]>, 21 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 22 | arguments: &Arguments<'_, WundergraphScalarValue>, 23 | field_name: &'static str, 24 | ) -> ExecutionResult 25 | where 26 | R: LoadingHandler, 27 | R::Table: HandleInsert + 'static, 28 | DB: Backend + ApplyOffset + 'static, 29 | DB::QueryBuilder: Default, 30 | R::Columns: BuildOrder 31 | + BuildSelect< 32 | R::Table, 33 | DB, 34 | SqlTypeOfPlaceholder, 35 | >, 36 | ::FromClause: QueryFragment, 37 | I: FromInputValue, 38 | { 39 | if let Some(n) = arguments.get::(field_name) { 40 | >::handle_insert(selection, executor, n) 41 | } else { 42 | let msg = format!("Missing argument {}", field_name); 43 | Err(FieldError::new(&msg, Value::Null)) 44 | } 45 | } 46 | 47 | #[doc(hidden)] 48 | pub fn handle_batch_insert( 49 | selection: Option<&'_ [Selection<'_, WundergraphScalarValue>]>, 50 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 51 | arguments: &Arguments<'_, WundergraphScalarValue>, 52 | field_name: &'static str, 53 | ) -> ExecutionResult 54 | where 55 | R: LoadingHandler, 56 | R::Table: HandleBatchInsert + 'static, 57 | DB: Backend + ApplyOffset + 'static, 58 | DB::QueryBuilder: Default, 59 | R::Columns: BuildOrder 60 | + BuildSelect< 61 | R::Table, 62 | DB, 63 | SqlTypeOfPlaceholder, 64 | >, 65 | ::FromClause: QueryFragment, 66 | I: FromInputValue, 67 | { 68 | if let Some(n) = arguments.get::>(field_name) { 69 | >::handle_batch_insert(selection, executor, n) 70 | } else { 71 | let msg = format!("Missing argument {}", field_name); 72 | Err(FieldError::new(&msg, Value::Null)) 73 | } 74 | } 75 | 76 | /// A trait to handle insert mutations for 
database entities 77 | /// 78 | /// Type parameters: 79 | /// * `Self`: database table type for diesel 80 | /// * `I`: data to insert into the table 81 | /// * `DB`: Backend type from diesel, so one of `Pg` or `Sqlite` 82 | /// * `Ctx`: The used wundergraph context type 83 | /// 84 | /// A default implementation is provided for all types implementing 85 | /// `diesel::Insertable` 86 | pub trait HandleInsert { 87 | /// Actual function called to insert a database entity 88 | fn handle_insert( 89 | selection: Option<&'_ [Selection<'_, WundergraphScalarValue>]>, 90 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 91 | insertable: I, 92 | ) -> ExecutionResult; 93 | } 94 | 95 | /// A trait to handle batch insert mutations for database entities 96 | /// 97 | /// Type parameters: 98 | /// * `Self`: database table type for diesel 99 | /// * `I`: data to insert into the table 100 | /// * `DB`: Backend type from diesel, so one of `Pg` or `Sqlite` 101 | /// * `Ctx`: The used wundergraph context type 102 | /// 103 | /// A default implementation is provided for all types implementing 104 | /// `diesel::Insertable` 105 | pub trait HandleBatchInsert { 106 | /// Actual function called to insert a batch of database entity 107 | fn handle_batch_insert( 108 | selection: Option<&'_ [Selection<'_, WundergraphScalarValue>]>, 109 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 110 | insertable: Vec, 111 | ) -> ExecutionResult; 112 | } 113 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/mutations/insert/sqlite.rs: -------------------------------------------------------------------------------- 1 | use super::{HandleBatchInsert, HandleInsert}; 2 | use crate::context::WundergraphContext; 3 | use crate::query_builder::selection::fields::WundergraphFieldList; 4 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 5 | use crate::query_builder::selection::order::BuildOrder; 6 | use crate::query_builder::selection::query_modifier::QueryModifier; 7 | use crate::query_builder::selection::select::BuildSelect; 8 | use crate::query_builder::selection::{LoadingHandler, SqlTypeOfPlaceholder}; 9 | use crate::scalar::WundergraphScalarValue; 10 | use diesel::associations::HasTable; 11 | use diesel::dsl::SqlTypeOf; 12 | use diesel::expression::dsl::sql; 13 | use diesel::query_builder::{BoxedSelectStatement, InsertStatement, QueryFragment}; 14 | use diesel::query_dsl::methods::{BoxedDsl, ExecuteDsl, LimitDsl, OrderDsl}; 15 | use diesel::sql_types::{Bool, HasSqlType}; 16 | use diesel::sqlite::Sqlite; 17 | use diesel::{AppearsOnTable, Connection, Insertable, RunQueryDsl, Table}; 18 | use juniper::{ExecutionResult, Executor, Selection, Value}; 19 | 20 | impl HandleInsert for T 21 | where 22 | T: Table + HasTable
+ 'static, 23 | T::FromClause: QueryFragment, 24 | L: LoadingHandler, 25 | L::Columns: BuildOrder 26 | + BuildSelect< 27 | T, 28 | Sqlite, 29 | SqlTypeOfPlaceholder, 30 | >, 31 | Ctx: WundergraphContext + QueryModifier, 32 | Ctx::Connection: Connection, 33 | L::FieldList: WundergraphFieldList, 34 | I: Insertable, 35 | I::Values: QueryFragment, 36 | InsertStatement: ExecuteDsl, 37 | T: BoxedDsl< 38 | 'static, 39 | Sqlite, 40 | Output = BoxedSelectStatement<'static, SqlTypeOf<::AllColumns>, T, Sqlite>, 41 | >, 42 | >::Ret: AppearsOnTable, 43 | Sqlite: HasSqlType>, 44 | { 45 | fn handle_insert( 46 | selection: Option<&'_ [Selection<'_, WundergraphScalarValue>]>, 47 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 48 | insertable: I, 49 | ) -> ExecutionResult { 50 | let ctx = executor.context(); 51 | let conn = ctx.get_connection(); 52 | conn.transaction(|| -> ExecutionResult { 53 | let look_ahead = executor.look_ahead(); 54 | insertable.insert_into(T::table()).execute(conn)?; 55 | let q = OrderDsl::order(L::build_query(&[], &look_ahead)?, sql::("rowid DESC")); 56 | let q = LimitDsl::limit(q, 1); 57 | let items = L::load(&look_ahead, selection, executor, q)?; 58 | 59 | Ok(items.into_iter().next().unwrap_or(Value::Null)) 60 | }) 61 | } 62 | } 63 | 64 | impl HandleBatchInsert for T 65 | where 66 | T: Table + HasTable
+ 'static, 67 | T::FromClause: QueryFragment, 68 | L: LoadingHandler, 69 | L::Columns: BuildOrder 70 | + BuildSelect< 71 | T, 72 | Sqlite, 73 | SqlTypeOfPlaceholder, 74 | >, 75 | Ctx: WundergraphContext + QueryModifier, 76 | Ctx::Connection: Connection, 77 | L::FieldList: WundergraphFieldList, 78 | I: Insertable, 79 | I::Values: QueryFragment, 80 | InsertStatement: ExecuteDsl, 81 | T: BoxedDsl< 82 | 'static, 83 | Sqlite, 84 | Output = BoxedSelectStatement<'static, SqlTypeOf<::AllColumns>, T, Sqlite>, 85 | >, 86 | >::Ret: AppearsOnTable, 87 | Sqlite: HasSqlType>, 88 | { 89 | fn handle_batch_insert( 90 | selection: Option<&'_ [Selection<'_, WundergraphScalarValue>]>, 91 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 92 | batch: Vec, 93 | ) -> ExecutionResult { 94 | let ctx = executor.context(); 95 | let conn = ctx.get_connection(); 96 | conn.transaction(|| -> ExecutionResult { 97 | let look_ahead = executor.look_ahead(); 98 | let n: usize = batch 99 | .into_iter() 100 | .map(|i| i.insert_into(T::table()).execute(conn)) 101 | .collect::, _>>()? 102 | .into_iter() 103 | .sum(); 104 | let q = OrderDsl::order(L::build_query(&[], &look_ahead)?, sql::("rowid DESC")); 105 | let q = LimitDsl::limit(q, n as i64); 106 | let items = L::load(&look_ahead, selection, executor, q)?; 107 | Ok(Value::list(items.into_iter().rev().collect())) 108 | }) 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/mutations/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains all functionality that is needed to implement mutations 2 | //! 3 | //! In general mutations should just work without any additional work that 4 | //! writing some struct definition and deriving basic diesel for them 5 | //! For special cases a manual implementation of one of the for exported traits 6 | //! is required 7 | //! 8 | //! # Insert 9 | //! 10 | //! The easiest way to provide a single table insert mutation is to crate a struct 11 | //! with all corresponding fields that derive `#[derive(Insertable, GrahpQLInputobject)]` 12 | //! ```rust 13 | //! # #[macro_use] 14 | //! # extern crate diesel; 15 | //! # #[macro_use] 16 | //! # extern crate juniper; 17 | //! # table! { 18 | //! # heros { 19 | //! # id -> Integer, 20 | //! # name -> Text, 21 | //! # species -> Nullable, 22 | //! # home_world -> Nullable, 23 | //! # } 24 | //! # } 25 | //! 26 | //! #[derive(Insertable, GraphQLInputObject, Clone, Debug)] 27 | //! #[table_name = "heros"] 28 | //! pub struct NewHero { 29 | //! name: String, 30 | //! species: i32, 31 | //! home_world: Option, 32 | //! } 33 | //! # fn main() {} 34 | //! ``` 35 | //! 36 | //! For more complex cases like inserts that involve multiple tables at one 37 | //! implement [`HandleInsert`](trait.HandleInsert.html) and 38 | //! [`InsertHelper`](trait.InsertHelper.html) manually 39 | //! 40 | //! # Update 41 | //! 42 | //! Similar to `Insert` operations the easiest way to provide a single table update 43 | //! mutation is to create a struct with all corresponding fields that derive 44 | //! `#[derive(AsChangeset, GraphqlInputObject, Identifiable)]` 45 | //! ```rust 46 | //! # #[macro_use] 47 | //! # extern crate diesel; 48 | //! # #[macro_use] 49 | //! # extern crate juniper; 50 | //! # table! { 51 | //! # heros { 52 | //! # id -> Integer, 53 | //! # name -> Text, 54 | //! # species -> Nullable, 55 | //! # home_world -> Nullable, 56 | //! # } 57 | //! # } 58 | //! 59 | //! 
#[derive(AsChangeset, GraphQLInputObject, Identifiable, Debug)] 60 | //! #[table_name = "heros"] 61 | //! pub struct HeroChangeset { 62 | //! id: i32, 63 | //! name: Option, 64 | //! species: Option, 65 | //! home_world: Option, 66 | //! } 67 | //! # fn main() {} 68 | //! ``` 69 | 70 | mod delete; 71 | mod insert; 72 | mod update; 73 | 74 | #[doc(inline)] 75 | pub use self::delete::{DeletedCount, HandleDelete}; 76 | #[doc(inline)] 77 | pub use self::insert::{HandleBatchInsert, HandleInsert}; 78 | #[doc(inline)] 79 | pub use self::update::HandleUpdate; 80 | 81 | #[doc(hidden)] 82 | pub use self::delete::handle_delete; 83 | #[doc(hidden)] 84 | pub use self::insert::{handle_batch_insert, handle_insert}; 85 | #[doc(hidden)] 86 | pub use self::update::handle_update; 87 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/fields/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains helper types to work with fields of wundergraph entities 2 | 3 | mod associations; 4 | mod field_list; 5 | mod helper; 6 | 7 | #[doc(inline)] 8 | pub use self::helper::{ 9 | FieldListExtractor, NonTableFieldCollector, NonTableFieldExtractor, TableFieldCollector, 10 | }; 11 | 12 | #[doc(inline)] 13 | pub use self::associations::WundergraphBelongsTo; 14 | #[doc(inline)] 15 | pub use self::field_list::WundergraphFieldList; 16 | #[doc(inline)] 17 | pub use wundergraph_derive::WundergraphBelongsTo; 18 | 19 | pub(crate) use self::associations::WundergraphResolveAssociations; 20 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/build_filter.rs: -------------------------------------------------------------------------------- 1 | use crate::diesel_ext::BoxableFilter; 2 | use diesel::backend::Backend; 3 | use diesel::expression::{Expression, NonAggregate, SqlLiteral}; 4 | use diesel::query_builder::QueryFragment; 5 | use diesel::sql_types::Bool; 6 | 7 | /// A trait that indicates that some type could be converted into a sql filter 8 | /// operation. 
9 | pub trait BuildFilter 10 | where 11 | DB: Backend, 12 | { 13 | /// The return type of the constructed filter 14 | type Ret: Expression + NonAggregate + QueryFragment; 15 | 16 | /// A function that convertes a given type into a diesel filter expression 17 | fn into_filter(self) -> Option; 18 | } 19 | 20 | impl<'a, T, DB> BuildFilter 21 | for Box + 'a> 22 | where 23 | DB: Backend, 24 | { 25 | type Ret = Self; 26 | fn into_filter(self) -> Option { 27 | Some(self) 28 | } 29 | } 30 | 31 | impl BuildFilter for Option 32 | where 33 | T: BuildFilter, 34 | DB: Backend, 35 | { 36 | type Ret = T::Ret; 37 | 38 | fn into_filter(self) -> Option { 39 | self.and_then(BuildFilter::into_filter) 40 | } 41 | } 42 | 43 | impl BuildFilter for () 44 | where 45 | DB: Backend, 46 | { 47 | type Ret = SqlLiteral; 48 | 49 | fn into_filter(self) -> Option { 50 | None 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/collector/and.rs: -------------------------------------------------------------------------------- 1 | use super::FilterCollector; 2 | use crate::diesel_ext::BoxableFilter; 3 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 4 | use diesel::backend::Backend; 5 | use diesel::query_builder::QueryFragment; 6 | use diesel::{AppearsOnTable, BoolExpressionMethods}; 7 | use std::fmt::{self, Debug}; 8 | 9 | /// A filter collected that combines all given filters using `AND` 10 | pub struct AndCollector<'a, T, DB>( 11 | Option + 'a>>, 12 | ); 13 | 14 | impl<'a, T, DB> Debug for AndCollector<'a, T, DB> 15 | where 16 | DB: Backend, 17 | DB::QueryBuilder: Default, 18 | { 19 | fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { 20 | fmt.debug_tuple("AndCollector") 21 | .field(&self.0.as_ref().map(|q| ::diesel::debug_query(q))) 22 | .finish() 23 | } 24 | } 25 | 26 | impl<'a, T, DB> Default for AndCollector<'a, T, DB> { 27 | fn default() -> Self { 28 | AndCollector(None) 29 | } 30 | } 31 | 32 | impl<'a, T, DB> FilterCollector<'a, T, DB> for AndCollector<'a, T, DB> 33 | where 34 | DB: Backend + 'a, 35 | T: 'a, 36 | { 37 | fn append_filter(&mut self, f: F) 38 | where 39 | F: BuildFilter + 'a, 40 | F::Ret: AppearsOnTable + QueryFragment + 'a, 41 | { 42 | let f = f.into_filter(); 43 | let c = ::std::mem::replace(&mut self.0, None); 44 | self.0 = match (c, f) { 45 | (Some(c), Some(f)) => Some(Box::new(c.and(f)) as Box<_>), 46 | (Some(c), None) => Some(c), 47 | (None, Some(f)) => Some(Box::new(f) as Box<_>), 48 | (None, None) => None, 49 | }; 50 | } 51 | } 52 | 53 | impl<'a, T, DB> BuildFilter for AndCollector<'a, T, DB> 54 | where 55 | DB: Backend, 56 | { 57 | type Ret = Box + 'a>; 58 | 59 | fn into_filter(self) -> Option { 60 | self.0 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/collector/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains helper types to combine multiple filter expressions 2 | //! 
into a final expression 3 | 4 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 5 | use diesel::backend::Backend; 6 | use diesel::query_builder::QueryFragment; 7 | use diesel::AppearsOnTable; 8 | 9 | mod and; 10 | mod or; 11 | 12 | #[doc(inline)] 13 | pub use self::and::AndCollector; 14 | #[doc(inline)] 15 | pub use self::or::OrCollector; 16 | 17 | /// A trait indicating that some type could collect multiple separate filter 18 | /// expressions into one single expression 19 | pub trait FilterCollector<'a, T, DB: Backend> { 20 | /// Append a new filter expression to the already collected expressions 21 | fn append_filter(&mut self, f: F) 22 | where 23 | F: BuildFilter + 'a, 24 | F::Ret: AppearsOnTable + QueryFragment + 'a; 25 | } 26 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/collector/or.rs: -------------------------------------------------------------------------------- 1 | use super::FilterCollector; 2 | use crate::diesel_ext::BoxableFilter; 3 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 4 | use diesel::backend::Backend; 5 | use diesel::query_builder::QueryFragment; 6 | use diesel::{AppearsOnTable, BoolExpressionMethods}; 7 | use std::fmt::{self, Debug}; 8 | 9 | /// A filter collected that combines all given filters using `or` 10 | pub struct OrCollector<'a, T, DB>( 11 | Option + 'a>>, 12 | ); 13 | 14 | impl<'a, T, DB> Debug for OrCollector<'a, T, DB> 15 | where 16 | DB: Backend, 17 | DB::QueryBuilder: Default, 18 | { 19 | fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { 20 | fmt.debug_tuple("OrCollector") 21 | .field(&self.0.as_ref().map(|q| ::diesel::debug_query(q))) 22 | .finish() 23 | } 24 | } 25 | 26 | impl<'a, T, DB> Default for OrCollector<'a, T, DB> { 27 | fn default() -> Self { 28 | OrCollector(None) 29 | } 30 | } 31 | 32 | impl<'a, T, DB> FilterCollector<'a, T, DB> for OrCollector<'a, T, DB> 33 | where 34 | DB: Backend + 'a, 35 | T: 'a, 36 | { 37 | fn append_filter(&mut self, f: F) 38 | where 39 | F: BuildFilter + 'a, 40 | F::Ret: AppearsOnTable + QueryFragment + 'a, 41 | { 42 | let f = f.into_filter(); 43 | let c = ::std::mem::replace(&mut self.0, None); 44 | self.0 = match (c, f) { 45 | (Some(c), Some(f)) => Some(Box::new(c.or(f)) as Box<_>), 46 | (Some(c), None) => Some(c), 47 | (None, Some(f)) => Some(Box::new(f) as Box<_>), 48 | (None, None) => None, 49 | }; 50 | } 51 | } 52 | 53 | impl<'a, T, DB> BuildFilter for OrCollector<'a, T, DB> 54 | where 55 | DB: Backend, 56 | { 57 | type Ret = Box + 'a>; 58 | 59 | fn into_filter(self) -> Option { 60 | self.0 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/common_filter/eq.rs: -------------------------------------------------------------------------------- 1 | use std::marker::PhantomData; 2 | 3 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 4 | use crate::scalar::WundergraphScalarValue; 5 | 6 | use crate::diesel_ext::BoxableFilter; 7 | use diesel::backend::Backend; 8 | use diesel::expression::{operators, AsExpression, Expression, NonAggregate}; 9 | use diesel::query_builder::QueryFragment; 10 | use diesel::serialize::ToSql; 11 | use diesel::sql_types::{Bool, HasSqlType}; 12 | use diesel::{AppearsOnTable, Column, ExpressionMethods}; 13 | 14 | use juniper::{InputValue, ToInputValue}; 15 | 16 | #[derive(Debug)] 17 | pub struct Eq(Option, PhantomData); 18 | 19 
| impl Eq { 20 | pub(super) fn new(v: Option) -> Self { 21 | Self(v, PhantomData) 22 | } 23 | } 24 | 25 | impl Clone for Eq 26 | where 27 | T: Clone, 28 | { 29 | fn clone(&self) -> Self { 30 | Self(self.0.clone(), PhantomData) 31 | } 32 | } 33 | 34 | impl BuildFilter for Eq 35 | where 36 | C: ExpressionMethods + NonAggregate + Column + QueryFragment + Default + 'static, 37 | T: AsExpression + ToSql<::SqlType, DB>, 38 | T::Expression: NonAggregate + AppearsOnTable + QueryFragment + 'static, 39 | DB: Backend + HasSqlType<::SqlType> + 'static, 40 | C::Table: 'static, 41 | operators::Eq>::Expression>: 42 | AppearsOnTable, 43 | { 44 | type Ret = Box>; 45 | 46 | fn into_filter(self) -> Option { 47 | let Self(filter, _) = self; 48 | filter.map(|v| Box::new(C::default().eq(v)) as Box<_>) 49 | } 50 | } 51 | 52 | impl ToInputValue for Eq 53 | where 54 | T: ToInputValue, 55 | { 56 | fn to_input_value(&self) -> InputValue { 57 | self.0.to_input_value() 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/common_filter/eq_any.rs: -------------------------------------------------------------------------------- 1 | use std::marker::PhantomData; 2 | 3 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 4 | use crate::scalar::WundergraphScalarValue; 5 | 6 | use crate::diesel_ext::BoxableFilter; 7 | use diesel::backend::Backend; 8 | use diesel::expression::array_comparison::{In, Many}; 9 | use diesel::expression::{AsExpression, Expression, NonAggregate}; 10 | use diesel::query_builder::QueryFragment; 11 | use diesel::serialize::ToSql; 12 | use diesel::sql_types::{Bool, HasSqlType}; 13 | use diesel::{AppearsOnTable, Column, ExpressionMethods}; 14 | 15 | use juniper::{InputValue, ToInputValue}; 16 | 17 | #[derive(Debug)] 18 | pub struct EqAny(Option>, PhantomData); 19 | 20 | impl EqAny { 21 | pub(super) fn new(v: Option>) -> Self { 22 | Self(v, PhantomData) 23 | } 24 | } 25 | 26 | impl Clone for EqAny 27 | where 28 | T: Clone, 29 | { 30 | fn clone(&self) -> Self { 31 | Self(self.0.clone(), PhantomData) 32 | } 33 | } 34 | 35 | impl BuildFilter for EqAny 36 | where 37 | DB: Backend + HasSqlType<::SqlType> + 'static, 38 | C: ExpressionMethods + NonAggregate + Column + QueryFragment + Default + 'static, 39 | T: AsExpression + ToSql<::SqlType, DB>, 40 | T::Expression: AppearsOnTable + QueryFragment + 'static, 41 | C::Table: 'static, 42 | In>::Expression>>: 43 | AppearsOnTable, 44 | { 45 | type Ret = Box>; 46 | 47 | fn into_filter(self) -> Option { 48 | let Self(filter, _) = self; 49 | filter.map(|v| Box::new(C::default().eq_any(v)) as Box<_>) 50 | } 51 | } 52 | 53 | impl ToInputValue for EqAny 54 | where 55 | T: ToInputValue, 56 | { 57 | fn to_input_value(&self) -> InputValue { 58 | self.0.to_input_value() 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/common_filter/not_eq.rs: -------------------------------------------------------------------------------- 1 | use std::marker::PhantomData; 2 | 3 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 4 | use crate::scalar::WundergraphScalarValue; 5 | 6 | use crate::diesel_ext::BoxableFilter; 7 | use diesel::backend::Backend; 8 | use diesel::expression::{operators, AsExpression, Expression, NonAggregate}; 9 | use diesel::query_builder::QueryFragment; 10 | use diesel::serialize::ToSql; 11 | use diesel::sql_types::{Bool, HasSqlType}; 
12 | use diesel::{AppearsOnTable, Column, ExpressionMethods}; 13 | 14 | use juniper::{InputValue, ToInputValue}; 15 | 16 | #[derive(Debug)] 17 | pub struct NotEq(Option, PhantomData); 18 | 19 | impl NotEq { 20 | pub(super) fn new(v: Option) -> Self { 21 | Self(v, PhantomData) 22 | } 23 | } 24 | 25 | impl Clone for NotEq 26 | where 27 | T: Clone, 28 | { 29 | fn clone(&self) -> Self { 30 | Self(self.0.clone(), PhantomData) 31 | } 32 | } 33 | 34 | impl BuildFilter for NotEq 35 | where 36 | C: ExpressionMethods + NonAggregate + Column + QueryFragment + Default + 'static, 37 | T: AsExpression + ToSql<::SqlType, DB>, 38 | T::Expression: NonAggregate + AppearsOnTable + QueryFragment + 'static, 39 | DB: Backend + HasSqlType<::SqlType> + 'static, 40 | C::Table: 'static, 41 | operators::NotEq>::Expression>: 42 | AppearsOnTable, 43 | { 44 | type Ret = Box>; 45 | 46 | fn into_filter(self) -> Option { 47 | let Self(filter, _) = self; 48 | filter.map(|v| Box::new(C::default().ne(v)) as Box<_>) 49 | } 50 | } 51 | 52 | impl ToInputValue for NotEq 53 | where 54 | T: ToInputValue, 55 | { 56 | fn to_input_value(&self) -> InputValue { 57 | self.0.to_input_value() 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/filter_value.rs: -------------------------------------------------------------------------------- 1 | use crate::juniper_ext::FromLookAheadValue; 2 | use crate::query_builder::selection::filter::nullable_filter::NullableFilter; 3 | use crate::query_builder::selection::filter::string_filter::StringFilter; 4 | use crate::scalar::WundergraphScalarValue; 5 | use juniper::{FromInputValue, ToInputValue}; 6 | 7 | /// A fundamental trait marking that a filter could be constructed for a given type 8 | /// 9 | /// The generic parameter `C` represents the type of the column (from diesel) 10 | /// For most implementations this should just be generic over all compatible columns 11 | pub trait FilterValue { 12 | /// The raw value type 13 | /// 14 | /// Normally this is the same as `Self` but there are cases like `Option` 15 | /// where we want something other (`T`) as input for our filter operations 16 | type RawValue: Clone 17 | + FromInputValue 18 | + FromLookAheadValue 19 | + ToInputValue; 20 | /// A type describing possible additional filters 21 | /// 22 | /// For some cases a type supports more operations that just the default set 23 | /// (eq, neq, gt, lt, …). In such cases a type representing those additional 24 | /// operations could be specified here. 
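/// (`String`, for example, plugs in `StringFilter` here to gain a `like`
/// operation, while the plain numeric implementations below use `()`.)
///
/// A sketch for a custom column type might look like this, assuming `MyId`
/// is a made up newtype that already implements the input/output traits
/// required by `RawValue`:
///
/// ```ignore
/// impl<C> FilterValue<C> for MyId {
///     type RawValue = MyId;
///     type AdditionalFilter = ();
/// }
/// ```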
25 | /// If there are no additional operations just use `()` 26 | type AdditionalFilter; 27 | } 28 | 29 | impl FilterValue for i16 { 30 | type RawValue = Self; 31 | type AdditionalFilter = (); 32 | } 33 | 34 | impl FilterValue for i32 { 35 | type RawValue = Self; 36 | type AdditionalFilter = (); 37 | } 38 | 39 | impl FilterValue for i64 { 40 | type RawValue = Self; 41 | type AdditionalFilter = (); 42 | } 43 | 44 | impl FilterValue for String { 45 | type RawValue = Self; 46 | type AdditionalFilter = StringFilter; 47 | } 48 | 49 | impl FilterValue for bool { 50 | type RawValue = Self; 51 | type AdditionalFilter = (); 52 | } 53 | 54 | impl FilterValue for f32 { 55 | type RawValue = Self; 56 | type AdditionalFilter = (); 57 | } 58 | 59 | impl FilterValue for f64 { 60 | type RawValue = Self; 61 | type AdditionalFilter = (); 62 | } 63 | 64 | impl FilterValue for Vec 65 | where 66 | V: FromLookAheadValue 67 | + FromInputValue 68 | + ToInputValue 69 | + FilterValue 70 | + Clone, 71 | { 72 | type RawValue = Self; 73 | type AdditionalFilter = (); 74 | } 75 | 76 | impl FilterValue for Option 77 | where 78 | V: Clone 79 | + FromInputValue 80 | + FromLookAheadValue 81 | + ToInputValue 82 | + FilterValue, 83 | { 84 | type RawValue = V; 85 | type AdditionalFilter = NullableFilter; 86 | } 87 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/inner_filter.rs: -------------------------------------------------------------------------------- 1 | use crate::juniper_ext::{NameBuilder, Nameable}; 2 | use crate::scalar::WundergraphScalarValue; 3 | use indexmap::IndexMap; 4 | use juniper::meta::Argument; 5 | use juniper::{InputValue, LookAheadValue, Registry}; 6 | 7 | /// A trait marking that some type is part of a filter 8 | /// 9 | /// The main objective of this trait is to allow adding a 10 | /// new filter type without implementing multiple traits. 
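///
/// # Example
///
/// A rough sketch of a single-field filter (`MyVersionFilter` and its `value`
/// field are made up for illustration; the `StringFilter` and `NullableFilter`
/// implementations in this crate follow the same shape):
///
/// ```ignore
/// struct MyVersionFilter {
///     value: Option<i32>,
/// }
///
/// impl Nameable for MyVersionFilter {
///     fn name() -> String {
///         String::from("MyVersionFilter")
///     }
/// }
///
/// impl InnerFilter for MyVersionFilter {
///     type Context = ();
///
///     const FIELD_COUNT: usize = 1;
///
///     fn from_inner_input_value(
///         obj: IndexMap<&str, &InputValue<WundergraphScalarValue>>,
///     ) -> Option<Self> {
///         // treat a missing or invalid `value` entry as "no filter set"
///         let value = obj.get("value").and_then(|v| i32::from_input_value(v));
///         Some(Self { value })
///     }
///
///     fn from_inner_look_ahead(
///         obj: &[(&str, LookAheadValue<'_, WundergraphScalarValue>)],
///     ) -> Self {
///         let value = obj
///             .iter()
///             .find(|o| o.0 == "value")
///             .and_then(|o| i32::from_look_ahead(&o.1));
///         Self { value }
///     }
///
///     fn to_inner_input_value(&self, v: &mut IndexMap<&str, InputValue<WundergraphScalarValue>>) {
///         v.insert("value", self.value.to_input_value());
///     }
///
///     fn register_fields<'r>(
///         _info: &NameBuilder<Self>,
///         registry: &mut Registry<'r, WundergraphScalarValue>,
///     ) -> Vec<Argument<'r, WundergraphScalarValue>> {
///         vec![registry.arg_with_default::<Option<i32>>("value", &None, &())]
///     }
/// }
/// ```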
11 | pub trait InnerFilter: Sized + Nameable { 12 | /// The used context type 13 | type Context; 14 | 15 | /// The number of fields created by this filter 16 | const FIELD_COUNT: usize; 17 | 18 | /// Create the given filter from a graphql input value 19 | fn from_inner_input_value( 20 | v: IndexMap<&str, &InputValue>, 21 | ) -> Option; 22 | /// Create the given filter from a graphql lookahead value 23 | fn from_inner_look_ahead(v: &[(&str, LookAheadValue<'_, WundergraphScalarValue>)]) -> Self; 24 | /// Covert the given filter into a graphql value 25 | fn to_inner_input_value(&self, v: &mut IndexMap<&str, InputValue>); 26 | /// Register all fields of the the filter in a given graphql schema 27 | /// 28 | /// This method should register exactly `FIELD_COUNT` new fields 29 | fn register_fields<'r>( 30 | info: &NameBuilder, 31 | registry: &mut Registry<'r, WundergraphScalarValue>, 32 | ) -> Vec>; 33 | } 34 | 35 | impl InnerFilter for () { 36 | type Context = (); 37 | 38 | const FIELD_COUNT: usize = 0; 39 | 40 | fn from_inner_input_value( 41 | _v: IndexMap<&str, &InputValue>, 42 | ) -> Option { 43 | Some(()) 44 | } 45 | 46 | fn from_inner_look_ahead(_v: &[(&str, LookAheadValue<'_, WundergraphScalarValue>)]) -> Self {} 47 | fn to_inner_input_value(&self, _v: &mut IndexMap<&str, InputValue>) {} 48 | fn register_fields<'r>( 49 | _info: &NameBuilder, 50 | _registry: &mut Registry<'r, WundergraphScalarValue>, 51 | ) -> Vec> { 52 | vec![] 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/not.rs: -------------------------------------------------------------------------------- 1 | use super::BuildFilter; 2 | use crate::juniper_ext::{FromLookAheadValue, NameBuilder, Nameable}; 3 | use crate::scalar::WundergraphScalarValue; 4 | use diesel::backend::Backend; 5 | use diesel::dsl; 6 | use diesel::helper_types; 7 | use juniper::meta::MetaType; 8 | use juniper::{FromInputValue, GraphQLType, InputValue, LookAheadValue, Registry, ToInputValue}; 9 | 10 | /// A filter node representing a negation operation 11 | #[derive(Debug)] 12 | pub struct Not(I); 13 | 14 | impl BuildFilter for Not 15 | where 16 | DB: Backend, 17 | I: BuildFilter, 18 | { 19 | type Ret = helper_types::not; 20 | 21 | fn into_filter(self) -> Option { 22 | self.0.into_filter().map(dsl::not) 23 | } 24 | } 25 | 26 | impl Nameable for Not 27 | where 28 | I: Nameable, 29 | { 30 | fn name() -> String { 31 | format!("Not_{}", I::name()) 32 | } 33 | } 34 | 35 | impl FromInputValue for Not 36 | where 37 | I: FromInputValue, 38 | { 39 | fn from_input_value(v: &InputValue) -> Option { 40 | I::from_input_value(v).map(Self) 41 | } 42 | } 43 | 44 | impl FromLookAheadValue for Not 45 | where 46 | I: FromLookAheadValue, 47 | { 48 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 49 | I::from_look_ahead(v).map(Self) 50 | } 51 | } 52 | 53 | impl ToInputValue for Not 54 | where 55 | I: ToInputValue, 56 | { 57 | fn to_input_value(&self) -> InputValue { 58 | I::to_input_value(&self.0) 59 | } 60 | } 61 | 62 | impl GraphQLType for Not 63 | where 64 | F: GraphQLType, 65 | F::TypeInfo: Default, 66 | { 67 | type Context = F::Context; 68 | type TypeInfo = NameBuilder; 69 | 70 | fn name(info: &Self::TypeInfo) -> Option<&str> { 71 | Some(info.name()) 72 | } 73 | 74 | fn meta<'r>( 75 | _info: &Self::TypeInfo, 76 | registry: &mut Registry<'r, WundergraphScalarValue>, 77 | ) -> MetaType<'r, WundergraphScalarValue> 78 | where 79 | 
WundergraphScalarValue: 'r, 80 | { 81 | F::meta(&Default::default(), registry) 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/nullable_filter/filter_option.rs: -------------------------------------------------------------------------------- 1 | use crate::diesel_ext::BoxableFilter; 2 | use crate::juniper_ext::{FromLookAheadValue, NameBuilder, Nameable}; 3 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 4 | use crate::query_builder::selection::filter::collector::{AndCollector, FilterCollector}; 5 | use crate::query_builder::selection::filter::filter_value::FilterValue; 6 | use crate::query_builder::selection::filter::inner_filter::InnerFilter; 7 | use crate::scalar::WundergraphScalarValue; 8 | use diesel::backend::Backend; 9 | use diesel::expression::{AsExpression, NonAggregate}; 10 | use diesel::query_builder::QueryFragment; 11 | use diesel::sql_types::{Bool, SingleValue}; 12 | use diesel::{AppearsOnTable, Column}; 13 | use indexmap::IndexMap; 14 | use juniper::meta::Argument; 15 | use juniper::{FromInputValue, InputValue, LookAheadValue, Registry}; 16 | 17 | use super::IsNull; 18 | 19 | #[derive(Debug)] 20 | pub struct NullableFilter 21 | where 22 | V: FilterValue, 23 | { 24 | is_null: Option>, 25 | additional: V::AdditionalFilter, 26 | } 27 | 28 | impl Clone for NullableFilter 29 | where 30 | V: FilterValue, 31 | V::AdditionalFilter: Clone, 32 | { 33 | fn clone(&self) -> Self { 34 | Self { 35 | is_null: self.is_null.clone(), 36 | additional: self.additional.clone(), 37 | } 38 | } 39 | } 40 | 41 | impl BuildFilter for NullableFilter 42 | where 43 | C: Column + NonAggregate + QueryFragment + Default + 'static, 44 | C::SqlType: SingleValue, 45 | C::Table: 'static, 46 | DB: Backend + 'static, 47 | V: FilterValue + 'static, 48 | V::AdditionalFilter: BuildFilter, 49 | >::Ret: AppearsOnTable + QueryFragment, 50 | V::RawValue: AsExpression + 'static, 51 | >::Expression: 52 | AppearsOnTable + NonAggregate + QueryFragment + 'static, 53 | IsNull: BuildFilter, 54 | as BuildFilter>::Ret: AppearsOnTable + QueryFragment, 55 | { 56 | type Ret = Box>; 57 | 58 | fn into_filter(self) -> Option { 59 | let mut combinator = AndCollector::default(); 60 | combinator.append_filter(self.is_null); 61 | combinator.append_filter(self.additional); 62 | combinator.into_filter() 63 | } 64 | } 65 | 66 | impl Nameable for NullableFilter 67 | where 68 | V: Nameable + FilterValue, 69 | { 70 | fn name() -> String { 71 | format!("NullableFilter_{}_", V::name()) 72 | } 73 | } 74 | 75 | impl InnerFilter for NullableFilter 76 | where 77 | V: FilterValue + Nameable, 78 | V::AdditionalFilter: InnerFilter, 79 | { 80 | type Context = (); 81 | 82 | const FIELD_COUNT: usize = 1 + V::AdditionalFilter::FIELD_COUNT; 83 | fn from_inner_input_value( 84 | obj: IndexMap<&str, &InputValue>, 85 | ) -> Option { 86 | let is_null = obj.get("is_null").map(|v| bool::from_input_value(v)); 87 | let is_null = match is_null { 88 | Some(Some(b)) => Some(IsNull::new(b)), 89 | Some(None) => return None, 90 | None => None, 91 | }; 92 | let additional = match V::AdditionalFilter::from_inner_input_value(obj) { 93 | Some(a) => a, 94 | None => return None, 95 | }; 96 | Some(Self { 97 | is_null, 98 | additional, 99 | }) 100 | } 101 | 102 | fn from_inner_look_ahead(obj: &[(&str, LookAheadValue<'_, WundergraphScalarValue>)]) -> Self { 103 | let is_null = obj 104 | .iter() 105 | .find(|o| o.0 == "is_null") 106 | .and_then(|o| 
bool::from_look_ahead(&o.1)) 107 | .map(IsNull::new); 108 | let additional = V::AdditionalFilter::from_inner_look_ahead(obj); 109 | Self { 110 | is_null, 111 | additional, 112 | } 113 | } 114 | 115 | fn to_inner_input_value(&self, _v: &mut IndexMap<&str, InputValue>) {} 116 | 117 | fn register_fields<'r>( 118 | _info: &NameBuilder, 119 | registry: &mut Registry<'r, WundergraphScalarValue>, 120 | ) -> Vec> { 121 | let is_null = registry.arg_with_default::>("is_null", &None, &()); 122 | let additional = V::AdditionalFilter::register_fields(&NameBuilder::default(), registry); 123 | let mut ret = vec![is_null]; 124 | ret.extend(additional); 125 | ret 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/nullable_filter/is_null.rs: -------------------------------------------------------------------------------- 1 | use crate::diesel_ext::BoxableFilter; 2 | use crate::juniper_ext::{FromLookAheadValue, NameBuilder, Nameable}; 3 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 4 | use crate::query_builder::selection::filter::inner_filter::InnerFilter; 5 | use crate::scalar::WundergraphScalarValue; 6 | use diesel::backend::Backend; 7 | use diesel::expression::{operators, NonAggregate}; 8 | use diesel::query_builder::QueryFragment; 9 | use diesel::sql_types::Bool; 10 | use diesel::{AppearsOnTable, Column, ExpressionMethods}; 11 | use indexmap::IndexMap; 12 | use juniper::meta::Argument; 13 | use juniper::{FromInputValue, InputValue, LookAheadValue, Registry, ToInputValue}; 14 | use std::marker::PhantomData; 15 | 16 | #[derive(Debug)] 17 | pub struct IsNull(bool, PhantomData); 18 | 19 | impl IsNull { 20 | pub(crate) fn new(v: bool) -> Self { 21 | Self(v, PhantomData) 22 | } 23 | } 24 | 25 | impl Clone for IsNull { 26 | fn clone(&self) -> Self { 27 | Self(self.0, PhantomData) 28 | } 29 | } 30 | 31 | impl BuildFilter for IsNull 32 | where 33 | C: Column + ExpressionMethods + NonAggregate + QueryFragment + Default + 'static, 34 | DB: Backend + 'static, 35 | C::Table: 'static, 36 | operators::IsNull: AppearsOnTable, 37 | operators::IsNotNull: AppearsOnTable, 38 | { 39 | type Ret = Box>; 40 | 41 | fn into_filter(self) -> Option { 42 | if self.0 { 43 | Some(Box::new(C::default().is_null()) as Box<_>) 44 | } else { 45 | Some(Box::new(C::default().is_not_null()) as Box<_>) 46 | } 47 | } 48 | } 49 | 50 | impl ToInputValue for IsNull { 51 | fn to_input_value(&self) -> InputValue { 52 | self.0.to_input_value() 53 | } 54 | } 55 | 56 | impl Nameable for IsNull { 57 | fn name() -> String { 58 | String::from("is_null") 59 | } 60 | } 61 | 62 | //That's a false positive by clippy 63 | #[allow(clippy::use_self)] 64 | impl InnerFilter for Option> { 65 | type Context = (); 66 | 67 | const FIELD_COUNT: usize = 1; 68 | fn from_inner_input_value( 69 | obj: IndexMap<&str, &InputValue>, 70 | ) -> Option { 71 | let is_null = obj.get("is_null").map(|v| bool::from_input_value(v)); 72 | match is_null { 73 | Some(Some(b)) => Some(Some(IsNull::new(b))), 74 | Some(None) => None, 75 | None => Some(None), 76 | } 77 | } 78 | 79 | fn from_inner_look_ahead(obj: &[(&str, LookAheadValue<'_, WundergraphScalarValue>)]) -> Self { 80 | obj.iter() 81 | .find(|o| o.0 == "is_null") 82 | .and_then(|o| bool::from_look_ahead(&o.1)) 83 | .map(IsNull::new) 84 | } 85 | 86 | fn to_inner_input_value(&self, v: &mut IndexMap<&str, InputValue>) { 87 | v.insert("is_null", self.to_input_value()); 88 | } 89 | 90 | fn register_fields<'r>( 
91 | _info: &NameBuilder, 92 | registry: &mut Registry<'r, WundergraphScalarValue>, 93 | ) -> Vec> { 94 | let is_null = registry.arg_with_default::>("is_null", &None, &()); 95 | vec![is_null] 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/nullable_filter/mod.rs: -------------------------------------------------------------------------------- 1 | mod filter_option; 2 | mod is_null; 3 | 4 | pub use self::filter_option::NullableFilter; 5 | pub use self::is_null::IsNull; 6 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/string_filter/like.rs: -------------------------------------------------------------------------------- 1 | use crate::diesel_ext::BoxableFilter; 2 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 3 | use crate::scalar::WundergraphScalarValue; 4 | use diesel::backend::Backend; 5 | use diesel::expression::{operators, AsExpression, NonAggregate}; 6 | use diesel::query_builder::QueryFragment; 7 | use diesel::serialize::ToSql; 8 | use diesel::sql_types::{Bool, HasSqlType, Text}; 9 | use diesel::{AppearsOnTable, Column, TextExpressionMethods}; 10 | use juniper::{InputValue, ToInputValue}; 11 | use std::marker::PhantomData; 12 | 13 | #[derive(Debug)] 14 | pub struct Like(Option, ::std::marker::PhantomData); 15 | 16 | impl Like { 17 | pub(super) fn new(v: Option) -> Self { 18 | Self(v, PhantomData) 19 | } 20 | } 21 | 22 | impl Clone for Like { 23 | fn clone(&self) -> Self { 24 | Self(self.0.clone(), PhantomData) 25 | } 26 | } 27 | 28 | impl BuildFilter for Like 29 | where 30 | C: TextExpressionMethods + NonAggregate + Column + QueryFragment + Default + 'static, 31 | String: AsExpression, 32 | >::Expression: 33 | NonAggregate + AppearsOnTable + QueryFragment + 'static, 34 | DB: Backend + HasSqlType + 'static, 35 | String: ToSql, 36 | C::Table: 'static, 37 | operators::Like>::Expression>: 38 | AppearsOnTable, 39 | { 40 | type Ret = Box>; 41 | 42 | fn into_filter(self) -> Option { 43 | let Self(filter, _) = self; 44 | filter.map(|v| Box::new(C::default().like(v)) as Box<_>) 45 | } 46 | } 47 | 48 | impl ToInputValue for Like { 49 | fn to_input_value(&self) -> InputValue { 50 | self.0.to_input_value() 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/filter/string_filter/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::diesel_ext::BoxableFilter; 2 | use crate::juniper_ext::{FromLookAheadValue, NameBuilder, Nameable}; 3 | use crate::query_builder::selection::filter::build_filter::BuildFilter; 4 | use crate::query_builder::selection::filter::inner_filter::InnerFilter; 5 | use crate::scalar::WundergraphScalarValue; 6 | use diesel::backend::Backend; 7 | use diesel::sql_types::Bool; 8 | use diesel::Column; 9 | use indexmap::IndexMap; 10 | use juniper::meta::Argument; 11 | use juniper::{FromInputValue, InputValue, LookAheadValue, Registry, ToInputValue}; 12 | 13 | mod like; 14 | use self::like::Like; 15 | 16 | #[derive(Debug)] 17 | pub struct StringFilter { 18 | like: Like, 19 | } 20 | 21 | impl Clone for StringFilter { 22 | fn clone(&self) -> Self { 23 | Self { 24 | like: self.like.clone(), 25 | } 26 | } 27 | } 28 | 29 | impl Nameable for StringFilter { 30 | fn name() -> String { 31 | String::new() 32 | } 33 | } 34 | 35 | impl BuildFilter for 
StringFilter 36 | where 37 | DB: Backend, 38 | C: Column, 39 | Like: BuildFilter>>, 40 | { 41 | type Ret = Box>; 42 | 43 | fn into_filter(self) -> Option { 44 | self.like.into_filter() 45 | } 46 | } 47 | 48 | impl InnerFilter for StringFilter { 49 | type Context = (); 50 | 51 | const FIELD_COUNT: usize = 1; 52 | 53 | fn from_inner_input_value( 54 | obj: IndexMap<&str, &InputValue>, 55 | ) -> Option { 56 | let like = Like::new(obj.get("like").map_or_else( 57 | || { 58 | let v: &InputValue = &InputValue::Null; 59 | Option::from_input_value(v) 60 | }, 61 | |v| Option::from_input_value(*v), 62 | )?); 63 | Some(Self { like }) 64 | } 65 | 66 | fn from_inner_look_ahead(obj: &[(&str, LookAheadValue<'_, WundergraphScalarValue>)]) -> Self { 67 | let like = obj 68 | .iter() 69 | .find(|o| o.0 == "like") 70 | .and_then(|o| String::from_look_ahead(&o.1)); 71 | Self { 72 | like: Like::new(like), 73 | } 74 | } 75 | 76 | fn to_inner_input_value(&self, map: &mut IndexMap<&str, InputValue>) { 77 | map.insert("like", self.like.to_input_value()); 78 | } 79 | 80 | fn register_fields<'r>( 81 | _info: &NameBuilder, 82 | registry: &mut Registry<'r, WundergraphScalarValue>, 83 | ) -> Vec> { 84 | let like = registry.arg_with_default::>("like", &None, &Default::default()); 85 | vec![like] 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/offset.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Result; 2 | #[cfg(any(feature = "postgres", feature = "sqlite"))] 3 | use crate::error::WundergraphError; 4 | #[cfg(any(feature = "postgres", feature = "sqlite"))] 5 | use crate::juniper_ext::FromLookAheadValue; 6 | use crate::query_builder::selection::{BoxedQuery, LoadingHandler}; 7 | use crate::scalar::WundergraphScalarValue; 8 | use diesel::backend::Backend; 9 | #[cfg(feature = "sqlite")] 10 | use diesel::query_dsl::methods::LimitDsl; 11 | #[cfg(any(feature = "postgres", feature = "sqlite"))] 12 | use diesel::query_dsl::methods::OffsetDsl; 13 | use juniper::LookAheadSelection; 14 | 15 | /// A trait abstracting over the different behaviour of limit/offset 16 | /// clauses in different database systems 17 | pub trait ApplyOffset: Backend { 18 | /// Add a offset clause to the given query if requested 19 | fn apply_offset<'a, L, Ctx>( 20 | query: BoxedQuery<'a, L, Self, Ctx>, 21 | select: &LookAheadSelection<'_, WundergraphScalarValue>, 22 | ) -> Result> 23 | where 24 | L: LoadingHandler; 25 | } 26 | 27 | #[cfg(feature = "postgres")] 28 | impl ApplyOffset for diesel::pg::Pg { 29 | fn apply_offset<'a, L, Ctx>( 30 | query: BoxedQuery<'a, L, Self, Ctx>, 31 | select: &LookAheadSelection<'_, WundergraphScalarValue>, 32 | ) -> Result> 33 | where 34 | L: LoadingHandler, 35 | { 36 | use juniper::LookAheadMethods; 37 | if let Some(offset) = select.argument("offset") { 38 | Ok(<_ as OffsetDsl>::offset( 39 | query, 40 | i64::from_look_ahead(offset.value()) 41 | .ok_or(WundergraphError::CouldNotBuildFilterArgument)?, 42 | )) 43 | } else { 44 | Ok(query) 45 | } 46 | } 47 | } 48 | 49 | #[cfg(feature = "sqlite")] 50 | impl ApplyOffset for diesel::sqlite::Sqlite { 51 | fn apply_offset<'a, L, Ctx>( 52 | query: BoxedQuery<'a, L, Self, Ctx>, 53 | select: &LookAheadSelection<'_, WundergraphScalarValue>, 54 | ) -> Result> 55 | where 56 | L: LoadingHandler, 57 | { 58 | use juniper::LookAheadMethods; 59 | if let Some(offset) = select.argument("offset") { 60 | let q = <_ as OffsetDsl>::offset( 61 | query, 
62 | i64::from_look_ahead(offset.value()) 63 | .ok_or(WundergraphError::CouldNotBuildFilterArgument)?, 64 | ); 65 | if select.argument("limit").is_some() { 66 | Ok(q) 67 | } else { 68 | Ok(<_ as LimitDsl>::limit(q, -1)) 69 | } 70 | } else { 71 | Ok(query) 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/query_modifier.rs: -------------------------------------------------------------------------------- 1 | use super::{BoxedQuery, LoadingHandler}; 2 | use crate::context::WundergraphContext; 3 | use crate::error::Result; 4 | use crate::query_builder::selection::offset::ApplyOffset; 5 | use crate::scalar::WundergraphScalarValue; 6 | use diesel::backend::Backend; 7 | use diesel::query_builder::QueryFragment; 8 | use diesel::{Connection, QuerySource}; 9 | use juniper::LookAheadSelection; 10 | 11 | /// A trait to modify the query generated by the default `LoadingHandler` implementation 12 | /// 13 | /// A blanket implementation is provided for using a connection as context, otherwise 14 | /// this trait needs to be implemented for the user defined context. 15 | /// There are two ways to do this: 16 | /// * Provide a blanket implementation for all types implementing `LoadingHandler`. 17 | /// See the default implementation for connection types for an example. 18 | /// * Add a specialized implementation for each type that implements `LoadingHandler`/ 19 | /// derives [`#[derive(WundergraphEntity)]`](derive.WundergraphEntity.html). 20 | /// Those types are represented by the generic 21 | /// parameter `L`. This approach allows a fine level of control about the final 22 | /// query to load data from the database. It is possible to cancel a query or add 23 | /// additional query clauses depending on the provided context or the actual graphql 24 | /// request. 25 | pub trait QueryModifier: WundergraphContext + Sized 26 | where 27 | L: LoadingHandler, 28 | DB: Backend + ApplyOffset + 'static, 29 | { 30 | /// A function that allows you to customize the default `LoadingHandler` implementation. 31 | /// See the top level documentation of this trait for more details. 
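///
/// A sketch of a specialized implementation for a hypothetical user defined
/// context `MyContext` and entity `Hero` (both names are only illustrative,
/// and `MyContext` is assumed to already provide the required
/// `WundergraphContext` implementation):
///
/// ```ignore
/// impl<DB> QueryModifier<Hero, DB> for MyContext<DB>
/// where
///     DB: Backend + ApplyOffset + 'static,
///     Hero: LoadingHandler<DB, Self>,
/// {
///     fn modify_query<'a>(
///         &self,
///         select: &LookAheadSelection<'_, WundergraphScalarValue>,
///         query: BoxedQuery<'a, Hero, DB, Self>,
///     ) -> Result<BoxedQuery<'a, Hero, DB, Self>> {
///         // Inspect `self` or `select` here to add additional clauses to
///         // `query`, or return an error to cancel the whole request; this
///         // version simply passes the query through unchanged.
///         Ok(query)
///     }
/// }
/// ```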
32 | fn modify_query<'a>( 33 | &self, 34 | select: &LookAheadSelection<'_, WundergraphScalarValue>, 35 | query: BoxedQuery<'a, L, DB, Self>, 36 | ) -> Result>; 37 | } 38 | 39 | impl QueryModifier for Conn 40 | where 41 | T: LoadingHandler, 42 | Conn: Connection + 'static, 43 | DB: Backend + ApplyOffset + 'static, 44 | T::Table: 'static, 45 | ::FromClause: QueryFragment, 46 | DB::QueryBuilder: Default, 47 | { 48 | fn modify_query<'a>( 49 | &self, 50 | _select: &LookAheadSelection<'_, WundergraphScalarValue>, 51 | query: BoxedQuery<'a, T, DB, Self>, 52 | ) -> Result> { 53 | Ok(query) 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/selection/select.rs: -------------------------------------------------------------------------------- 1 | use crate::diesel_ext::MaybeNull; 2 | use crate::error::Result; 3 | use crate::scalar::WundergraphScalarValue; 4 | use diesel::backend::Backend; 5 | use diesel::expression::NonAggregate; 6 | use diesel::query_builder::QueryFragment; 7 | use diesel::{BoxableExpression, Column, Expression, ExpressionMethods, SelectableExpression}; 8 | use juniper::LookAheadMethods; 9 | use juniper::LookAheadSelection; 10 | 11 | /// A helper trait to construct a select clause for a given table out of 12 | /// a given graphql request 13 | pub trait BuildSelect { 14 | /// Construct the select clause out of a given graphql request 15 | fn build_select( 16 | select: &LookAheadSelection<'_, WundergraphScalarValue>, 17 | get_field_name: impl Fn(usize) -> &'static str, 18 | is_primary_key_index: impl Fn(usize) -> bool, 19 | should_select_primary_key: bool, 20 | ) -> Result>>; 21 | } 22 | 23 | macro_rules! impl_select_builder { 24 | ($( 25 | $Tuple:tt { 26 | $(($idx:tt) -> $T:ident, $ST: ident, $TT: ident,) + 27 | } 28 | )+) => { 29 | $( 30 | impl BuildSelect< 31 | Table, DB, ($( as Expression>::SqlType,)+ ), 32 | > for ($($T,)+) 33 | where Table: ::diesel::Table, 34 | DB: Backend, 35 | $($T: Column
+ Default + ExpressionMethods + 36 | SelectableExpression
+ NonAggregate + QueryFragment + 'static ,)+ 37 | $(MaybeNull<$T>: Expression,)+ 38 | { 39 | fn build_select( 40 | select: &LookAheadSelection<'_, WundergraphScalarValue>, 41 | get_field_name: impl Fn(usize) -> &'static str, 42 | is_primary_key_index: impl Fn(usize) -> bool, 43 | should_select_primary_key: bool, 44 | ) -> Result< 45 | Box< 46 | dyn BoxableExpression< 47 | Table, 48 | DB, 49 | SqlType = ($( as Expression>::SqlType,)+)>, 50 | >> 51 | { 52 | Ok(Box::new(( 53 | $( 54 | if select.has_child(get_field_name($idx)) || 55 | (is_primary_key_index($idx) && should_select_primary_key) 56 | { 57 | MaybeNull::Expr($T::default()) 58 | } else { 59 | MaybeNull::Null 60 | }, 61 | )+ 62 | )) as Box<_>) 63 | } 64 | } 65 | )+ 66 | } 67 | } 68 | 69 | __diesel_for_each_tuple!(impl_select_builder); 70 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/types/field_value_resolver/direct_resolver.rs: -------------------------------------------------------------------------------- 1 | use super::{FieldValueResolver, ResolveWundergraphFieldValue}; 2 | use crate::error::Result; 3 | use crate::error::WundergraphError; 4 | use crate::query_builder::types::WundergraphValue; 5 | use crate::scalar::WundergraphScalarValue; 6 | use diesel::backend::Backend; 7 | use juniper::{Executor, FromContext, GraphQLType, Selection}; 8 | 9 | #[derive(Debug, Clone, Copy)] 10 | pub struct DirectResolver; 11 | 12 | impl FieldValueResolver for DirectResolver 13 | where 14 | DB: Backend, 15 | T: GraphQLType + WundergraphValue, 16 | T::PlaceHolder: Into>, 17 | >::Context: FromContext, 18 | { 19 | fn new(_elements: usize) -> Self { 20 | Self 21 | } 22 | 23 | fn resolve_value( 24 | &mut self, 25 | value: T::PlaceHolder, 26 | _look_ahead: &juniper::LookAheadSelection<'_, WundergraphScalarValue>, 27 | _selection: Option<&'_ [Selection<'_, WundergraphScalarValue>]>, 28 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 29 | ) -> Result>> { 30 | Ok(Some( 31 | executor 32 | .resolve_with_ctx(&(), &value.into().expect("Loading should not fail")) 33 | .map_err(|inner| WundergraphError::JuniperError { inner })?, 34 | )) 35 | } 36 | 37 | fn finalize( 38 | self, 39 | _global_args: &[juniper::LookAheadArgument], 40 | _look_ahead: &juniper::LookAheadSelection<'_, WundergraphScalarValue>, 41 | _selection: Option<&'_ [Selection<'_, WundergraphScalarValue>]>, 42 | _executor: &Executor<'_, Ctx, WundergraphScalarValue>, 43 | ) -> Result>>> { 44 | Ok(None) 45 | } 46 | } 47 | 48 | impl ResolveWundergraphFieldValue for T 49 | where 50 | DB: Backend, 51 | T: GraphQLType + WundergraphValue, 52 | DirectResolver: FieldValueResolver, 53 | { 54 | type Resolver = DirectResolver; 55 | } 56 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/types/field_value_resolver/mod.rs: -------------------------------------------------------------------------------- 1 | use super::WundergraphValue; 2 | use crate::error::Result; 3 | use crate::scalar::WundergraphScalarValue; 4 | use diesel::backend::Backend; 5 | use juniper::{Executor, Selection}; 6 | 7 | mod direct_resolver; 8 | mod has_one_resolver; 9 | 10 | /// A internal helper trait indicating how to resolve a given type while query 11 | /// execution 12 | pub trait ResolveWundergraphFieldValue: WundergraphValue + Sized { 13 | /// A type implementing `FieldValueResolver` used to resolve values of 14 | /// this type during query execution 15 | type Resolver: FieldValueResolver; 16 | } 17 | 
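// Note on the two resolver flavours declared above (illustrative sketch with
// made up variable names): `DirectResolver` in `direct_resolver.rs` turns each
// already loaded placeholder value directly into a graphql value, while the
// resolver in `has_one_resolver.rs` first records the foreign key values and
// only produces the referenced entities in `finalize`, so that they can be
// loaded together. The query resolver drives both through the same sequence of
// calls, roughly:
//
//     let mut resolver = <T as ResolveWundergraphFieldValue<DB, Ctx>>::Resolver::new(values.len());
//     let per_row = values
//         .into_iter()
//         .map(|v| resolver.resolve_value(v, look_ahead, selection, executor))
//         .collect::<Result<Vec<_>>>()?;
//     let batched = resolver.finalize(global_args, look_ahead, selection, executor)?;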
18 | pub trait FieldValueResolver 19 | where 20 | T: WundergraphValue, 21 | DB: Backend, 22 | { 23 | fn new(elements: usize) -> Self; 24 | 25 | fn resolve_value( 26 | &mut self, 27 | value: T::PlaceHolder, 28 | look_ahead: &juniper::LookAheadSelection<'_, WundergraphScalarValue>, 29 | selection: Option<&'_ [Selection<'_, WundergraphScalarValue>]>, 30 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 31 | ) -> Result>>; 32 | 33 | fn finalize( 34 | self, 35 | global_args: &[juniper::LookAheadArgument], 36 | look_ahead: &juniper::LookAheadSelection<'_, WundergraphScalarValue>, 37 | selection: Option<&'_ [Selection<'_, WundergraphScalarValue>]>, 38 | executor: &Executor<'_, Ctx, WundergraphScalarValue>, 39 | ) -> Result>>>; 40 | } 41 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/types/has_many.rs: -------------------------------------------------------------------------------- 1 | use crate::graphql_type::WundergraphGraphqlMapper; 2 | use crate::scalar::WundergraphScalarValue; 3 | use juniper::{meta, Registry}; 4 | use std::marker::PhantomData; 5 | 6 | /// Type used to indicate that a given field references multiple other entities 7 | /// by a given id 8 | #[derive(Debug, Clone, Hash, PartialEq, Eq)] 9 | pub struct HasMany(Vec, PhantomData); 10 | 11 | impl WundergraphGraphqlMapper for HasMany 12 | where 13 | T: WundergraphGraphqlMapper, 14 | { 15 | type GraphQLType = Vec; 16 | 17 | fn register_arguments<'r>( 18 | registry: &mut Registry<'r, WundergraphScalarValue>, 19 | field: meta::Field<'r, WundergraphScalarValue>, 20 | ) -> meta::Field<'r, WundergraphScalarValue> { 21 | T::register_arguments(registry, field) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/types/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains several helper types used constructing the final 2 | //! 
graphql model 3 | 4 | pub(crate) mod field_value_resolver; 5 | mod has_many; 6 | mod has_one; 7 | pub(crate) mod placeholder; 8 | mod wundergraph_value; 9 | 10 | pub use self::field_value_resolver::ResolveWundergraphFieldValue; 11 | pub use self::has_many::HasMany; 12 | pub use self::has_one::HasOne; 13 | pub use self::placeholder::PlaceHolder; 14 | pub use self::wundergraph_value::WundergraphValue; 15 | -------------------------------------------------------------------------------- /wundergraph/src/query_builder/types/placeholder.rs: -------------------------------------------------------------------------------- 1 | use diesel::backend::Backend; 2 | use diesel::deserialize::{self, FromSql}; 3 | use diesel::sql_types::{NotNull, Nullable}; 4 | 5 | pub trait PlaceHolderMarker { 6 | type InnerType; 7 | 8 | fn into_inner(self) -> Option; 9 | } 10 | 11 | /// A wrapper type used inside of wundergraph to load values of the type T 12 | /// from the database 13 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, FromSqlRow, Hash)] 14 | pub struct PlaceHolder(Option); 15 | 16 | impl PlaceHolderMarker for PlaceHolder { 17 | type InnerType = T; 18 | 19 | fn into_inner(self) -> Option { 20 | self.0 21 | } 22 | } 23 | 24 | impl Default for PlaceHolder { 25 | fn default() -> Self { 26 | Self(None) 27 | } 28 | } 29 | 30 | impl Into> for PlaceHolder { 31 | fn into(self) -> Option { 32 | self.0 33 | } 34 | } 35 | 36 | impl Into>> for PlaceHolder { 37 | fn into(self) -> Option> { 38 | Some(self.0) 39 | } 40 | } 41 | 42 | impl<'a, T> Into> for &'a PlaceHolder { 43 | fn into(self) -> Option<&'a T> { 44 | self.0.as_ref() 45 | } 46 | } 47 | 48 | impl FromSql, DB> for PlaceHolder 49 | where 50 | DB: Backend, 51 | T: FromSql, 52 | ST: NotNull, 53 | { 54 | fn from_sql(bytes: Option<&DB::RawValue>) -> deserialize::Result { 55 | if bytes.is_some() { 56 | T::from_sql(bytes).map(Some).map(Self) 57 | } else { 58 | Ok(Self(None)) 59 | } 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /wundergraph/src/third_party_integrations/chrono.rs: -------------------------------------------------------------------------------- 1 | use crate::juniper_ext::{FromLookAheadValue, Nameable}; 2 | use crate::query_builder::selection::filter::filter_helper::AsColumnFilter; 3 | use crate::query_builder::selection::filter::filter_value::FilterValue; 4 | use crate::query_builder::selection::filter::FilterOption; 5 | use crate::query_builder::types::{PlaceHolder, WundergraphValue}; 6 | use crate::scalar::WundergraphScalarValue; 7 | use chrono_internal::{DateTime, FixedOffset, NaiveDate, NaiveDateTime, TimeZone, Utc}; 8 | use diesel::sql_types::{Date, Nullable, Timestamp}; 9 | use juniper::{FromInputValue, LookAheadValue, ToInputValue}; 10 | 11 | impl From for WundergraphScalarValue { 12 | fn from(n: NaiveDateTime) -> Self { 13 | WundergraphScalarValue::Double(n.timestamp() as _) 14 | } 15 | } 16 | 17 | impl Nameable for NaiveDateTime { 18 | fn name() -> String { 19 | String::from("NaiveDateTime") 20 | } 21 | } 22 | 23 | impl Nameable for DateTime 24 | where 25 | O: TimeZone, 26 | { 27 | fn name() -> String { 28 | String::from("DateTime") 29 | } 30 | } 31 | impl Nameable for NaiveDate { 32 | fn name() -> String { 33 | String::from("Date") 34 | } 35 | } 36 | 37 | static RFC3339_PARSE_FORMAT: &str = "%+"; 38 | static RFC3339_FORMAT: &str = "%Y-%m-%dT%H:%M:%S%.f%:z"; 39 | 40 | impl FromLookAheadValue for NaiveDateTime { 41 | fn from_look_ahead(v: &LookAheadValue<'_, 
WundergraphScalarValue>) -> Option { 42 | if let LookAheadValue::Scalar(WundergraphScalarValue::String(ref s)) = *v { 43 | Self::parse_from_str(s, RFC3339_PARSE_FORMAT).ok() 44 | } else { 45 | None 46 | } 47 | } 48 | } 49 | 50 | impl FromLookAheadValue for DateTime { 51 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 52 | if let LookAheadValue::Scalar(WundergraphScalarValue::String(ref s)) = *v { 53 | s.parse().ok() 54 | } else { 55 | None 56 | } 57 | } 58 | } 59 | 60 | impl FromLookAheadValue for DateTime { 61 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 62 | if let LookAheadValue::Scalar(WundergraphScalarValue::String(ref s)) = *v { 63 | Self::parse_from_rfc3339(s).ok() 64 | } else { 65 | None 66 | } 67 | } 68 | } 69 | 70 | impl FromLookAheadValue for NaiveDate { 71 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 72 | if let LookAheadValue::Scalar(WundergraphScalarValue::String(ref s)) = *v { 73 | Self::parse_from_str(s, RFC3339_FORMAT).ok() 74 | } else { 75 | None 76 | } 77 | } 78 | } 79 | 80 | impl WundergraphValue for NaiveDateTime { 81 | type PlaceHolder = PlaceHolder; 82 | type SqlType = Nullable; 83 | } 84 | 85 | #[cfg(feature = "postgres")] 86 | impl WundergraphValue for DateTime { 87 | type PlaceHolder = PlaceHolder; 88 | type SqlType = Nullable; 89 | } 90 | 91 | impl WundergraphValue for NaiveDate { 92 | type PlaceHolder = PlaceHolder; 93 | type SqlType = Nullable; 94 | } 95 | 96 | impl FilterValue for NaiveDateTime { 97 | type RawValue = Self; 98 | type AdditionalFilter = (); 99 | } 100 | 101 | impl FilterValue for DateTime 102 | where 103 | O: TimeZone, 104 | Self: ToInputValue 105 | + FromInputValue 106 | + FromLookAheadValue, 107 | { 108 | type RawValue = Self; 109 | type AdditionalFilter = (); 110 | } 111 | 112 | impl FilterValue for NaiveDate { 113 | type RawValue = Self; 114 | type AdditionalFilter = (); 115 | } 116 | 117 | impl AsColumnFilter for NaiveDateTime { 118 | type Filter = FilterOption; 119 | } 120 | 121 | impl AsColumnFilter for DateTime { 122 | type Filter = FilterOption; 123 | } 124 | 125 | impl AsColumnFilter for NaiveDate { 126 | type Filter = FilterOption; 127 | } 128 | -------------------------------------------------------------------------------- /wundergraph/src/third_party_integrations/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "chrono")] 2 | mod chrono; 3 | #[cfg(all(feature = "uuid", feature = "postgres"))] 4 | mod uuid; 5 | -------------------------------------------------------------------------------- /wundergraph/src/third_party_integrations/uuid.rs: -------------------------------------------------------------------------------- 1 | use crate::juniper_ext::{FromLookAheadValue, Nameable}; 2 | use crate::query_builder::selection::filter::filter_helper::AsColumnFilter; 3 | use crate::query_builder::selection::filter::filter_value::FilterValue; 4 | use crate::query_builder::selection::filter::FilterOption; 5 | use crate::query_builder::types::{PlaceHolder, WundergraphValue}; 6 | use crate::scalar::WundergraphScalarValue; 7 | use diesel::sql_types::Nullable; 8 | use juniper::LookAheadValue; 9 | use uuid_internal::Uuid; 10 | 11 | impl Nameable for Uuid { 12 | fn name() -> String { 13 | String::from("Uuid") 14 | } 15 | } 16 | 17 | impl FromLookAheadValue for Uuid { 18 | fn from_look_ahead(v: &LookAheadValue<'_, WundergraphScalarValue>) -> Option { 19 | if let 
LookAheadValue::Scalar(WundergraphScalarValue::String(ref s)) = *v { 20 | Self::parse_str(s).ok() 21 | } else { 22 | None 23 | } 24 | } 25 | } 26 | 27 | impl WundergraphValue for Uuid { 28 | type PlaceHolder = PlaceHolder; 29 | type SqlType = Nullable<::diesel::sql_types::Uuid>; 30 | } 31 | 32 | impl AsColumnFilter for Uuid { 33 | type Filter = FilterOption; 34 | } 35 | 36 | impl FilterValue for Uuid { 37 | type RawValue = Self; 38 | type AdditionalFilter = (); 39 | } 40 | -------------------------------------------------------------------------------- /wundergraph/tests/alias.rs: -------------------------------------------------------------------------------- 1 | use crate::helper::*; 2 | use wundergraph_example::MyContext; 3 | 4 | #[test] 5 | fn check_alias() { 6 | let (schema, pool) = get_example_schema(); 7 | let ctx = MyContext::new(pool.get().unwrap()); 8 | 9 | let res = execute_query( 10 | &schema, 11 | &ctx, 12 | " 13 | { 14 | Heros { 15 | name: heroName 16 | HomeWorld: home_world { 17 | planet: name 18 | } 19 | } 20 | } 21 | ", 22 | ); 23 | assert!(res.is_ok()); 24 | assert_json_snapshot!( 25 | res.as_json(), @r###"[ 26 | { 27 | "Heros": [ 28 | { 29 | "HomeWorld": { 30 | "planet": "Tatooine" 31 | }, 32 | "name": "Luke Skywalker" 33 | }, 34 | { 35 | "HomeWorld": { 36 | "planet": "Tatooine" 37 | }, 38 | "name": "Darth Vader" 39 | }, 40 | { 41 | "HomeWorld": null, 42 | "name": "Han Solo" 43 | }, 44 | { 45 | "HomeWorld": { 46 | "planet": "Alderaan" 47 | }, 48 | "name": "Leia Organa" 49 | }, 50 | { 51 | "HomeWorld": null, 52 | "name": "Wilhuff Tarkin" 53 | } 54 | ] 55 | }, 56 | [] 57 | ]"### 58 | ); 59 | } 60 | -------------------------------------------------------------------------------- /wundergraph/tests/helper.rs: -------------------------------------------------------------------------------- 1 | #![allow(dead_code)] 2 | use crate::DbConnection; 3 | use diesel::r2d2::CustomizeConnection; 4 | use diesel::r2d2::*; 5 | use diesel::Connection; 6 | use juniper::*; 7 | use serde_json::*; 8 | use std::collections::HashMap; 9 | use std::path::PathBuf; 10 | use std::sync::Mutex; 11 | use wundergraph::scalar::WundergraphScalarValue; 12 | use wundergraph_bench::api::{Mutation as BenchMutation, Query as BenchQuery}; 13 | use wundergraph_bench::Schema as BenchSchema; 14 | use wundergraph_example::mutations::Mutation as ExampleMutation; 15 | use wundergraph_example::{MyContext, Query as ExampleQuery, Schema as ExampleSchema}; 16 | 17 | lazy_static! 
{ 18 | static ref MIGRATION_LOCK: Mutex<()> = Mutex::new(()); 19 | } 20 | 21 | #[derive(Debug)] 22 | struct TestTransaction; 23 | 24 | impl CustomizeConnection for TestTransaction { 25 | fn on_acquire( 26 | &self, 27 | conn: &mut DbConnection, 28 | ) -> ::std::result::Result<(), ::diesel::r2d2::Error> { 29 | conn.begin_test_transaction().unwrap(); 30 | Ok(()) 31 | } 32 | } 33 | 34 | pub fn get_example_schema() -> ( 35 | ExampleSchema>, 36 | Pool>, 37 | ) { 38 | let db_url = ::std::env::var("DATABASE_URL") 39 | .expect("You need to set `DATABASE_URL` as environment variable"); 40 | { 41 | let _migration_lock = MIGRATION_LOCK.lock(); 42 | let conn = DbConnection::establish(&db_url).unwrap(); 43 | run_migrations(&conn, "wundergraph_example"); 44 | } 45 | let manager = ConnectionManager::::new(db_url); 46 | let pool = Pool::builder() 47 | .max_size(1) 48 | .connection_customizer(Box::new(TestTransaction)) 49 | .build(manager) 50 | .expect("Failed to init pool"); 51 | 52 | let query = ExampleQuery::>::default(); 53 | let mutation = ExampleMutation::>::default(); 54 | (ExampleSchema::new(query, mutation), pool) 55 | } 56 | 57 | pub fn get_bench_schema() -> ( 58 | BenchSchema, 59 | Pool>, 60 | ) { 61 | let db_url = ::std::env::var("DATABASE_URL") 62 | .expect("You need to set `DATABASE_URL` as environment variable"); 63 | { 64 | let conn = DbConnection::establish(&db_url).unwrap(); 65 | run_migrations(&conn, "wundergraph_bench"); 66 | } 67 | let manager = ConnectionManager::::new(db_url); 68 | let pool = Pool::builder() 69 | .max_size(1) 70 | .connection_customizer(Box::new(TestTransaction)) 71 | .build(manager) 72 | .expect("Failed to init pool"); 73 | 74 | run_migrations(&*pool.get().unwrap(), "wundergraph_bench"); 75 | let query = BenchQuery::default(); 76 | let mutation = BenchMutation::default(); 77 | (BenchSchema::new(query, mutation), pool) 78 | } 79 | 80 | fn run_migrations(conn: &DbConnection, which: &str) { 81 | let mut migration_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); 82 | migration_path.push(".."); 83 | migration_path.push(which); 84 | migration_path.push("migrations"); 85 | if cfg!(feature = "postgres") { 86 | migration_path.push("pg"); 87 | } else if cfg!(feature = "sqlite") { 88 | migration_path.push("sqlite"); 89 | } 90 | let pending_migrations = 91 | ::diesel_migrations::mark_migrations_in_directory(conn, &migration_path) 92 | .unwrap() 93 | .into_iter() 94 | .filter_map(|(migration, run)| if run { None } else { Some(migration) }); 95 | 96 | ::diesel_migrations::run_migrations(conn, pending_migrations, &mut ::std::io::stdout()) 97 | .unwrap(); 98 | } 99 | 100 | #[derive(Debug)] 101 | pub struct WundergraphResponse<'a>( 102 | ::std::result::Result< 103 | ( 104 | ::juniper::Value, 105 | Vec>, 106 | ), 107 | GraphQLError<'a>, 108 | >, 109 | ); 110 | 111 | pub fn execute_query<'a, Q, M, C>( 112 | schema: &'a RootNode, 113 | ctx: &C, 114 | query: &'a str, 115 | ) -> WundergraphResponse<'a> 116 | where 117 | Q: GraphQLType, 118 | M: GraphQLType, 119 | { 120 | execute_query_with_variables(schema, ctx, query, &[]) 121 | } 122 | 123 | pub fn execute_query_with_variables<'a, Q, M, C>( 124 | schema: &'a RootNode, 125 | ctx: &C, 126 | query: &'a str, 127 | vars: &[(&str, ::serde_json::Value)], 128 | ) -> WundergraphResponse<'a> 129 | where 130 | Q: GraphQLType, 131 | M: GraphQLType, 132 | { 133 | let vars = vars 134 | .into_iter() 135 | .map(|(ref k, v)| { 136 | let v = to_string(&v).unwrap(); 137 | let var = from_str(&v).unwrap(); 138 | ((*k).to_owned(), var) 139 | }) 140 | 
.collect::>(); 141 | 142 | WundergraphResponse(execute(query, None, schema, &vars, ctx)) 143 | } 144 | 145 | impl<'a> WundergraphResponse<'a> { 146 | pub fn is_ok(&self) -> bool { 147 | self.0.is_ok() 148 | } 149 | 150 | pub fn is_err(&self) -> bool { 151 | self.0.is_err() 152 | } 153 | 154 | pub fn as_json(self) -> ::serde_json::Value { 155 | ::serde_json::to_value(self.0.unwrap()).unwrap() 156 | } 157 | } 158 | -------------------------------------------------------------------------------- /wundergraph/tests/lib.rs: -------------------------------------------------------------------------------- 1 | extern crate diesel; 2 | extern crate diesel_migrations; 3 | extern crate juniper; 4 | extern crate serde_json; 5 | extern crate wundergraph; 6 | extern crate wundergraph_bench; 7 | extern crate wundergraph_example; 8 | #[macro_use] 9 | extern crate lazy_static; 10 | #[macro_use] 11 | extern crate insta; 12 | 13 | mod helper; 14 | 15 | mod alias; 16 | mod limit_offset; 17 | mod mutations; 18 | mod order; 19 | mod query; 20 | mod query_nested; 21 | mod simple; 22 | mod type_checking; 23 | 24 | #[cfg(feature = "postgres")] 25 | type DbConnection = diesel::pg::PgConnection; 26 | 27 | #[cfg(feature = "sqlite")] 28 | type DbConnection = diesel::sqlite::SqliteConnection; 29 | 30 | #[cfg(not(any(feature = "postgres", feature = "sqlite")))] 31 | compile_error!("At least one feature of \"sqlite\" or \"postgres\" needs to be enabled"); 32 | -------------------------------------------------------------------------------- /wundergraph/tests/limit_offset.rs: -------------------------------------------------------------------------------- 1 | use crate::helper::*; 2 | use wundergraph_example::MyContext; 3 | 4 | #[test] 5 | fn limit() { 6 | let (schema, pool) = get_example_schema(); 7 | let ctx = MyContext::new(pool.get().unwrap()); 8 | 9 | let res = execute_query( 10 | &schema, 11 | &ctx, 12 | " 13 | { 14 | Heros(limit: 2) { 15 | heroName 16 | } 17 | } 18 | ", 19 | ); 20 | assert!(res.is_ok()); 21 | assert_json_snapshot!( 22 | res.as_json(), @r###"[ 23 | { 24 | "Heros": [ 25 | { 26 | "heroName": "Luke Skywalker" 27 | }, 28 | { 29 | "heroName": "Darth Vader" 30 | } 31 | ] 32 | }, 33 | [] 34 | ]"### 35 | ); 36 | } 37 | 38 | #[test] 39 | fn offset() { 40 | let (schema, pool) = get_example_schema(); 41 | let ctx = MyContext::new(pool.get().unwrap()); 42 | 43 | let res = execute_query( 44 | &schema, 45 | &ctx, 46 | " 47 | { 48 | Heros(offset: 2) { 49 | heroName 50 | } 51 | } 52 | ", 53 | ); 54 | assert!(res.is_ok()); 55 | assert_json_snapshot!( 56 | res.as_json(), @r###"[ 57 | { 58 | "Heros": [ 59 | { 60 | "heroName": "Han Solo" 61 | }, 62 | { 63 | "heroName": "Leia Organa" 64 | }, 65 | { 66 | "heroName": "Wilhuff Tarkin" 67 | } 68 | ] 69 | }, 70 | [] 71 | ]"### 72 | ); 73 | } 74 | 75 | #[test] 76 | fn limit_offset() { 77 | let (schema, pool) = get_example_schema(); 78 | let ctx = MyContext::new(pool.get().unwrap()); 79 | 80 | let res = execute_query( 81 | &schema, 82 | &ctx, 83 | " 84 | { 85 | Heros(limit: 2, offset: 2) { 86 | heroName 87 | } 88 | } 89 | ", 90 | ); 91 | assert!(res.is_ok()); 92 | assert_json_snapshot!( 93 | res.as_json(), @r###"[ 94 | { 95 | "Heros": [ 96 | { 97 | "heroName": "Han Solo" 98 | }, 99 | { 100 | "heroName": "Leia Organa" 101 | } 102 | ] 103 | }, 104 | [] 105 | ]"### 106 | ); 107 | } 108 | -------------------------------------------------------------------------------- /wundergraph/tests/mutations/create.rs: 
-------------------------------------------------------------------------------- 1 | use crate::helper::*; 2 | use wundergraph_example::MyContext; 3 | 4 | #[test] 5 | fn create_one() { 6 | let (schema, pool) = get_example_schema(); 7 | let ctx = MyContext::new(pool.get().unwrap()); 8 | 9 | let res = execute_query( 10 | &schema, 11 | &ctx, 12 | " 13 | { 14 | Heros { 15 | heroName 16 | } 17 | } 18 | ", 19 | ); 20 | 21 | assert!(res.is_ok()); 22 | assert_json_snapshot!( 23 | res.as_json(), @r###"[ 24 | { 25 | "Heros": [ 26 | { 27 | "heroName": "Luke Skywalker" 28 | }, 29 | { 30 | "heroName": "Darth Vader" 31 | }, 32 | { 33 | "heroName": "Han Solo" 34 | }, 35 | { 36 | "heroName": "Leia Organa" 37 | }, 38 | { 39 | "heroName": "Wilhuff Tarkin" 40 | } 41 | ] 42 | }, 43 | [] 44 | ]"### 45 | ); 46 | 47 | let res = execute_query( 48 | &schema, 49 | &ctx, 50 | r#" 51 | mutation NewHero { 52 | CreateHero(NewHero: {name: "Obi-Wan Kenobi", species: 1}) { 53 | heroName 54 | species { 55 | name 56 | } 57 | } 58 | } 59 | "#, 60 | ); 61 | 62 | assert!(res.is_ok()); 63 | assert_json_snapshot!( 64 | res.as_json(), @r###"[ 65 | { 66 | "CreateHero": { 67 | "heroName": "Obi-Wan Kenobi", 68 | "species": { 69 | "name": "Human" 70 | } 71 | } 72 | }, 73 | [] 74 | ]"### 75 | ); 76 | 77 | let res = execute_query( 78 | &schema, 79 | &ctx, 80 | " 81 | { 82 | Heros { 83 | heroName 84 | } 85 | } 86 | ", 87 | ); 88 | 89 | assert!(res.is_ok()); 90 | assert_json_snapshot!( 91 | res.as_json(), @r###"[ 92 | { 93 | "Heros": [ 94 | { 95 | "heroName": "Luke Skywalker" 96 | }, 97 | { 98 | "heroName": "Darth Vader" 99 | }, 100 | { 101 | "heroName": "Han Solo" 102 | }, 103 | { 104 | "heroName": "Leia Organa" 105 | }, 106 | { 107 | "heroName": "Wilhuff Tarkin" 108 | }, 109 | { 110 | "heroName": "Obi-Wan Kenobi" 111 | } 112 | ] 113 | }, 114 | [] 115 | ]"### 116 | ); 117 | } 118 | 119 | #[test] 120 | fn create_multiple() { 121 | let (schema, pool) = get_example_schema(); 122 | let ctx = MyContext::new(pool.get().unwrap()); 123 | 124 | let res = execute_query( 125 | &schema, 126 | &ctx, 127 | " 128 | { 129 | Heros { 130 | heroName 131 | } 132 | } 133 | ", 134 | ); 135 | 136 | assert!(res.is_ok()); 137 | assert_json_snapshot!( 138 | res.as_json(), @r###"[ 139 | { 140 | "Heros": [ 141 | { 142 | "heroName": "Luke Skywalker" 143 | }, 144 | { 145 | "heroName": "Darth Vader" 146 | }, 147 | { 148 | "heroName": "Han Solo" 149 | }, 150 | { 151 | "heroName": "Leia Organa" 152 | }, 153 | { 154 | "heroName": "Wilhuff Tarkin" 155 | } 156 | ] 157 | }, 158 | [] 159 | ]"### 160 | ); 161 | 162 | let res = execute_query( 163 | &schema, 164 | &ctx, 165 | r#" 166 | mutation NewHeros { 167 | CreateHeros(NewHeros: [{name: "Obi-Wan Kenobi", species: 1}, {name: "R2-D2", species: 2}]) { 168 | heroName 169 | species { 170 | name 171 | } 172 | } 173 | } 174 | "#, 175 | ); 176 | 177 | assert!(res.is_ok()); 178 | assert_json_snapshot!( 179 | res.as_json(), @r###"[ 180 | { 181 | "CreateHeros": [ 182 | { 183 | "heroName": "Obi-Wan Kenobi", 184 | "species": { 185 | "name": "Human" 186 | } 187 | }, 188 | { 189 | "heroName": "R2-D2", 190 | "species": { 191 | "name": "Robot" 192 | } 193 | } 194 | ] 195 | }, 196 | [] 197 | ]"### 198 | ); 199 | 200 | let res = execute_query( 201 | &schema, 202 | &ctx, 203 | " 204 | { 205 | Heros { 206 | heroName 207 | } 208 | } 209 | ", 210 | ); 211 | 212 | assert!(res.is_ok()); 213 | assert_json_snapshot!( 214 | res.as_json(), @r###"[ 215 | { 216 | "Heros": [ 217 | { 218 | "heroName": "Luke Skywalker" 219 | }, 220 | { 221 | "heroName": 
"Darth Vader" 222 | }, 223 | { 224 | "heroName": "Han Solo" 225 | }, 226 | { 227 | "heroName": "Leia Organa" 228 | }, 229 | { 230 | "heroName": "Wilhuff Tarkin" 231 | }, 232 | { 233 | "heroName": "Obi-Wan Kenobi" 234 | }, 235 | { 236 | "heroName": "R2-D2" 237 | } 238 | ] 239 | }, 240 | [] 241 | ]"### 242 | ); 243 | } 244 | -------------------------------------------------------------------------------- /wundergraph/tests/mutations/delete.rs: -------------------------------------------------------------------------------- 1 | use crate::helper::*; 2 | use wundergraph_example::MyContext; 3 | 4 | #[test] 5 | fn delete_existing() { 6 | let (schema, pool) = get_example_schema(); 7 | let ctx = MyContext::new(pool.get().unwrap()); 8 | 9 | let res = execute_query( 10 | &schema, 11 | &ctx, 12 | " 13 | { 14 | Heros { 15 | id 16 | heroName 17 | } 18 | } 19 | ", 20 | ); 21 | 22 | assert!(res.is_ok()); 23 | assert_json_snapshot!( 24 | res.as_json(), @r###"[ 25 | { 26 | "Heros": [ 27 | { 28 | "heroName": "Luke Skywalker", 29 | "id": 1 30 | }, 31 | { 32 | "heroName": "Darth Vader", 33 | "id": 2 34 | }, 35 | { 36 | "heroName": "Han Solo", 37 | "id": 3 38 | }, 39 | { 40 | "heroName": "Leia Organa", 41 | "id": 4 42 | }, 43 | { 44 | "heroName": "Wilhuff Tarkin", 45 | "id": 5 46 | } 47 | ] 48 | }, 49 | [] 50 | ]"### 51 | ); 52 | 53 | let res = execute_query( 54 | &schema, 55 | &ctx, 56 | r#" 57 | mutation DeleteHero { 58 | DeleteHero(DeleteHero: {id: 5}) { 59 | count 60 | } 61 | } 62 | "#, 63 | ); 64 | 65 | assert!(res.is_ok()); 66 | assert_json_snapshot!( 67 | res.as_json(), @r###"[ 68 | { 69 | "DeleteHero": { 70 | "count": 1 71 | } 72 | }, 73 | [] 74 | ]"### 75 | ); 76 | 77 | let res = execute_query( 78 | &schema, 79 | &ctx, 80 | " 81 | { 82 | Heros { 83 | id 84 | heroName 85 | } 86 | } 87 | ", 88 | ); 89 | 90 | assert!(res.is_ok()); 91 | assert_json_snapshot!( 92 | res.as_json(), @r###"[ 93 | { 94 | "Heros": [ 95 | { 96 | "heroName": "Luke Skywalker", 97 | "id": 1 98 | }, 99 | { 100 | "heroName": "Darth Vader", 101 | "id": 2 102 | }, 103 | { 104 | "heroName": "Han Solo", 105 | "id": 3 106 | }, 107 | { 108 | "heroName": "Leia Organa", 109 | "id": 4 110 | } 111 | ] 112 | }, 113 | [] 114 | ]"### 115 | ); 116 | } 117 | 118 | #[test] 119 | fn delete_non_existing() { 120 | let (schema, pool) = get_example_schema(); 121 | let ctx = MyContext::new(pool.get().unwrap()); 122 | 123 | let res = execute_query( 124 | &schema, 125 | &ctx, 126 | " 127 | { 128 | Heros { 129 | id 130 | heroName 131 | } 132 | } 133 | ", 134 | ); 135 | 136 | assert!(res.is_ok()); 137 | assert_json_snapshot!( 138 | res.as_json(), @r###"[ 139 | { 140 | "Heros": [ 141 | { 142 | "heroName": "Luke Skywalker", 143 | "id": 1 144 | }, 145 | { 146 | "heroName": "Darth Vader", 147 | "id": 2 148 | }, 149 | { 150 | "heroName": "Han Solo", 151 | "id": 3 152 | }, 153 | { 154 | "heroName": "Leia Organa", 155 | "id": 4 156 | }, 157 | { 158 | "heroName": "Wilhuff Tarkin", 159 | "id": 5 160 | } 161 | ] 162 | }, 163 | [] 164 | ]"### 165 | ); 166 | 167 | let res = execute_query( 168 | &schema, 169 | &ctx, 170 | r#" 171 | mutation DeleteHero { 172 | DeleteHero(DeleteHero: {id: 42}) { 173 | count 174 | } 175 | } 176 | "#, 177 | ); 178 | 179 | assert!(res.is_ok()); 180 | assert_json_snapshot!( 181 | res.as_json(), @r###"[ 182 | { 183 | "DeleteHero": { 184 | "count": 0 185 | } 186 | }, 187 | [] 188 | ]"### 189 | ); 190 | 191 | let res = execute_query( 192 | &schema, 193 | &ctx, 194 | " 195 | { 196 | Heros { 197 | id 198 | heroName 199 | } 200 | } 201 | ", 202 | ); 
203 | 204 | assert!(res.is_ok()); 205 | assert_json_snapshot!( 206 | res.as_json(), @r###"[ 207 | { 208 | "Heros": [ 209 | { 210 | "heroName": "Luke Skywalker", 211 | "id": 1 212 | }, 213 | { 214 | "heroName": "Darth Vader", 215 | "id": 2 216 | }, 217 | { 218 | "heroName": "Han Solo", 219 | "id": 3 220 | }, 221 | { 222 | "heroName": "Leia Organa", 223 | "id": 4 224 | }, 225 | { 226 | "heroName": "Wilhuff Tarkin", 227 | "id": 5 228 | } 229 | ] 230 | }, 231 | [] 232 | ]"### 233 | ); 234 | } 235 | -------------------------------------------------------------------------------- /wundergraph/tests/mutations/mod.rs: -------------------------------------------------------------------------------- 1 | mod create; 2 | mod delete; 3 | mod update; 4 | -------------------------------------------------------------------------------- /wundergraph/tests/mutations/update.rs: -------------------------------------------------------------------------------- 1 | use crate::helper::*; 2 | use wundergraph_example::MyContext; 3 | 4 | #[test] 5 | fn update_existing() { 6 | let (schema, pool) = get_example_schema(); 7 | let ctx = MyContext::new(pool.get().unwrap()); 8 | 9 | let res = execute_query( 10 | &schema, 11 | &ctx, 12 | " 13 | { 14 | Heros { 15 | id 16 | heroName 17 | hair_color 18 | } 19 | } 20 | ", 21 | ); 22 | 23 | assert!(res.is_ok()); 24 | assert_json_snapshot!( 25 | res.as_json(), @r###"[ 26 | { 27 | "Heros": [ 28 | { 29 | "hair_color": "blond", 30 | "heroName": "Luke Skywalker", 31 | "id": 1 32 | }, 33 | { 34 | "hair_color": null, 35 | "heroName": "Darth Vader", 36 | "id": 2 37 | }, 38 | { 39 | "hair_color": null, 40 | "heroName": "Han Solo", 41 | "id": 3 42 | }, 43 | { 44 | "hair_color": null, 45 | "heroName": "Leia Organa", 46 | "id": 4 47 | }, 48 | { 49 | "hair_color": null, 50 | "heroName": "Wilhuff Tarkin", 51 | "id": 5 52 | } 53 | ] 54 | }, 55 | [] 56 | ]"### 57 | ); 58 | 59 | let res = execute_query( 60 | &schema, 61 | &ctx, 62 | r#" 63 | mutation updateHero { 64 | UpdateHero(UpdateHero: {id: 4, hairColor: "dark"}) { 65 | heroName 66 | hair_color 67 | } 68 | } 69 | "#, 70 | ); 71 | 72 | assert!(res.is_ok()); 73 | assert_json_snapshot!( 74 | res.as_json(), @r###"[ 75 | { 76 | "UpdateHero": { 77 | "hair_color": "dark", 78 | "heroName": "Leia Organa" 79 | } 80 | }, 81 | [] 82 | ]"### 83 | ); 84 | 85 | let res = execute_query( 86 | &schema, 87 | &ctx, 88 | " 89 | { 90 | Heros(order: [{column: id, direction: ASC}]) { 91 | id 92 | heroName 93 | hair_color 94 | } 95 | } 96 | ", 97 | ); 98 | 99 | assert!(res.is_ok()); 100 | assert_json_snapshot!( 101 | res.as_json(), @r###" 102 | [ 103 | { 104 | "Heros": [ 105 | { 106 | "hair_color": "blond", 107 | "heroName": "Luke Skywalker", 108 | "id": 1 109 | }, 110 | { 111 | "hair_color": null, 112 | "heroName": "Darth Vader", 113 | "id": 2 114 | }, 115 | { 116 | "hair_color": null, 117 | "heroName": "Han Solo", 118 | "id": 3 119 | }, 120 | { 121 | "hair_color": "dark", 122 | "heroName": "Leia Organa", 123 | "id": 4 124 | }, 125 | { 126 | "hair_color": null, 127 | "heroName": "Wilhuff Tarkin", 128 | "id": 5 129 | } 130 | ] 131 | }, 132 | [] 133 | ] 134 | "### 135 | ); 136 | } 137 | 138 | #[test] 139 | fn update_non_existing() { 140 | let (schema, pool) = get_example_schema(); 141 | let ctx = MyContext::new(pool.get().unwrap()); 142 | 143 | let res = execute_query( 144 | &schema, 145 | &ctx, 146 | " 147 | { 148 | Heros { 149 | id 150 | heroName 151 | hair_color 152 | } 153 | } 154 | ", 155 | ); 156 | 157 | assert!(res.is_ok()); 158 | assert_json_snapshot!( 159 | 
res.as_json(), @r###"[ 160 | { 161 | "Heros": [ 162 | { 163 | "hair_color": "blond", 164 | "heroName": "Luke Skywalker", 165 | "id": 1 166 | }, 167 | { 168 | "hair_color": null, 169 | "heroName": "Darth Vader", 170 | "id": 2 171 | }, 172 | { 173 | "hair_color": null, 174 | "heroName": "Han Solo", 175 | "id": 3 176 | }, 177 | { 178 | "hair_color": null, 179 | "heroName": "Leia Organa", 180 | "id": 4 181 | }, 182 | { 183 | "hair_color": null, 184 | "heroName": "Wilhuff Tarkin", 185 | "id": 5 186 | } 187 | ] 188 | }, 189 | [] 190 | ]"### 191 | ); 192 | 193 | let res = execute_query( 194 | &schema, 195 | &ctx, 196 | r#" 197 | mutation updateHero { 198 | UpdateHero(UpdateHero: {id: 42, hairColor: "dark"}) { 199 | heroName 200 | hair_color 201 | } 202 | } 203 | "#, 204 | ); 205 | 206 | assert!(res.is_ok()); 207 | assert_json_snapshot!( 208 | res.as_json(), @r###"[ 209 | { 210 | "UpdateHero": null 211 | }, 212 | [] 213 | ]"### 214 | ); 215 | 216 | let res = execute_query( 217 | &schema, 218 | &ctx, 219 | " 220 | { 221 | Heros { 222 | id 223 | heroName 224 | hair_color 225 | } 226 | } 227 | ", 228 | ); 229 | 230 | assert!(res.is_ok()); 231 | assert_json_snapshot!( 232 | res.as_json(), @r###"[ 233 | { 234 | "Heros": [ 235 | { 236 | "hair_color": "blond", 237 | "heroName": "Luke Skywalker", 238 | "id": 1 239 | }, 240 | { 241 | "hair_color": null, 242 | "heroName": "Darth Vader", 243 | "id": 2 244 | }, 245 | { 246 | "hair_color": null, 247 | "heroName": "Han Solo", 248 | "id": 3 249 | }, 250 | { 251 | "hair_color": null, 252 | "heroName": "Leia Organa", 253 | "id": 4 254 | }, 255 | { 256 | "hair_color": null, 257 | "heroName": "Wilhuff Tarkin", 258 | "id": 5 259 | } 260 | ] 261 | }, 262 | [] 263 | ]"### 264 | ); 265 | } 266 | -------------------------------------------------------------------------------- /wundergraph/tests/order.rs: -------------------------------------------------------------------------------- 1 | use crate::helper::*; 2 | use wundergraph_example::MyContext; 3 | 4 | #[test] 5 | fn order_asc() { 6 | let (schema, pool) = get_example_schema(); 7 | let ctx = MyContext::new(pool.get().unwrap()); 8 | 9 | let res = execute_query( 10 | &schema, 11 | &ctx, 12 | " 13 | { 14 | Heros(order: [{column: heroName, direction: ASC}]) { 15 | heroName 16 | } 17 | } 18 | ", 19 | ); 20 | assert!(res.is_ok()); 21 | assert_json_snapshot!( 22 | res.as_json(), @r###"[ 23 | { 24 | "Heros": [ 25 | { 26 | "heroName": "Darth Vader" 27 | }, 28 | { 29 | "heroName": "Han Solo" 30 | }, 31 | { 32 | "heroName": "Leia Organa" 33 | }, 34 | { 35 | "heroName": "Luke Skywalker" 36 | }, 37 | { 38 | "heroName": "Wilhuff Tarkin" 39 | } 40 | ] 41 | }, 42 | [] 43 | ]"### 44 | ); 45 | } 46 | 47 | #[test] 48 | fn order_desc() { 49 | let (schema, pool) = get_example_schema(); 50 | let ctx = MyContext::new(pool.get().unwrap()); 51 | 52 | let res = execute_query( 53 | &schema, 54 | &ctx, 55 | " 56 | { 57 | Heros(order: [{column: heroName, direction: DESC}]) { 58 | heroName 59 | } 60 | } 61 | ", 62 | ); 63 | assert!(res.is_ok()); 64 | assert_json_snapshot!( 65 | res.as_json(), @r###"[ 66 | { 67 | "Heros": [ 68 | { 69 | "heroName": "Wilhuff Tarkin" 70 | }, 71 | { 72 | "heroName": "Luke Skywalker" 73 | }, 74 | { 75 | "heroName": "Leia Organa" 76 | }, 77 | { 78 | "heroName": "Han Solo" 79 | }, 80 | { 81 | "heroName": "Darth Vader" 82 | } 83 | ] 84 | }, 85 | [] 86 | ]"### 87 | ); 88 | } 89 | 90 | #[test] 91 | fn invalid_order() { 92 | let (schema, pool) = get_example_schema(); 93 | let ctx = MyContext::new(pool.get().unwrap()); 94 | 95 
| let res = execute_query( 96 | &schema, 97 | &ctx, 98 | " 99 | { 100 | Heros(order: {column: heroName, direction: DESC}) { 101 | heroName 102 | } 103 | } 104 | ", 105 | ); 106 | assert!(res.is_ok()); 107 | assert_json_snapshot!( 108 | res.as_json(), @r###" 109 | [ 110 | null, 111 | [ 112 | { 113 | "locations": [ 114 | { 115 | "column": 5, 116 | "line": 3 117 | } 118 | ], 119 | "message": "Could not build filter from arguments", 120 | "path": [ 121 | "Heros" 122 | ] 123 | } 124 | ] 125 | ] 126 | "### 127 | ); 128 | } 129 | -------------------------------------------------------------------------------- /wundergraph/tests/query_nested.rs: -------------------------------------------------------------------------------- 1 | use crate::helper::*; 2 | use wundergraph_example::MyContext; 3 | 4 | #[test] 5 | fn query_filter_eq_not_nullable_child() { 6 | let (schema, pool) = get_example_schema(); 7 | let ctx = MyContext::new(pool.get().unwrap()); 8 | 9 | let res = execute_query( 10 | &schema, 11 | &ctx, 12 | r#" 13 | { 14 | Heros(filter: {species: {name: {eq: "Human"}}}, order: [{column: id, direction: ASC}]) { 15 | heroName 16 | } 17 | } 18 | "#, 19 | ); 20 | println!("{:?}", res); 21 | assert!(res.is_ok()); 22 | assert_json_snapshot!( 23 | res.as_json(), @r###"[ 24 | { 25 | "Heros": [ 26 | { 27 | "heroName": "Luke Skywalker" 28 | }, 29 | { 30 | "heroName": "Darth Vader" 31 | }, 32 | { 33 | "heroName": "Han Solo" 34 | }, 35 | { 36 | "heroName": "Leia Organa" 37 | }, 38 | { 39 | "heroName": "Wilhuff Tarkin" 40 | } 41 | ] 42 | }, 43 | [] 44 | ]"### 45 | ); 46 | } 47 | 48 | #[test] 49 | fn query_filter_eq_nullable_child() { 50 | let (schema, pool) = get_example_schema(); 51 | let ctx = MyContext::new(pool.get().unwrap()); 52 | 53 | let res = execute_query( 54 | &schema, 55 | &ctx, 56 | r#" 57 | { 58 | Heros(filter: {home_world: {name: {eq: "Alderaan"}}}) { 59 | heroName 60 | } 61 | } 62 | "#, 63 | ); 64 | 65 | assert!(res.is_ok()); 66 | assert_json_snapshot!( 67 | res.as_json(), @r###"[ 68 | { 69 | "Heros": [ 70 | { 71 | "heroName": "Leia Organa" 72 | } 73 | ] 74 | }, 75 | [] 76 | ]"### 77 | ); 78 | } 79 | 80 | #[test] 81 | fn query_filter_nullable_child_is_null() { 82 | let (schema, pool) = get_example_schema(); 83 | let ctx = MyContext::new(pool.get().unwrap()); 84 | 85 | let res = execute_query( 86 | &schema, 87 | &ctx, 88 | r#" 89 | { 90 | Heros(filter: {home_world: {is_null: true}}) { 91 | heroName 92 | } 93 | } 94 | "#, 95 | ); 96 | 97 | assert!(res.is_ok()); 98 | assert_json_snapshot!( 99 | res.as_json(), @r###"[ 100 | { 101 | "Heros": [ 102 | { 103 | "heroName": "Han Solo" 104 | }, 105 | { 106 | "heroName": "Wilhuff Tarkin" 107 | } 108 | ] 109 | }, 110 | [] 111 | ]"### 112 | ); 113 | } 114 | 115 | #[test] 116 | fn query_filter_negative_expression() { 117 | let (schema, pool) = get_example_schema(); 118 | let ctx = MyContext::new(pool.get().unwrap()); 119 | 120 | let res = execute_query( 121 | &schema, 122 | &ctx, 123 | r#" 124 | { 125 | Heros(filter: {home_world: {name: {not_eq: "Tatooine"}}}) { 126 | heroName 127 | } 128 | } 129 | "#, 130 | ); 131 | 132 | assert!(res.is_ok()); 133 | // Only Leia has a home_world that is set and not "Tatooine" 134 | assert_json_snapshot!( 135 | res.as_json(), @r###"[ 136 | { 137 | "Heros": [ 138 | { 139 | "heroName": "Leia Organa" 140 | } 141 | ] 142 | }, 143 | [] 144 | ]"### 145 | ); 146 | } 147 | 148 | #[test] 149 | fn query_filter_double_nested() { 150 | let (schema, pool) = get_example_schema(); 151 | let ctx = MyContext::new(pool.get().unwrap()); 152 | 
153 | let res = execute_query( 154 | &schema, 155 | &ctx, 156 | r#" 157 | { 158 | Heros(filter: {home_world: {heros: {heroName: {like: "Luke%"}}}}) { 159 | heroName 160 | } 161 | } 162 | "#, 163 | ); 164 | assert!(res.is_ok()); 165 | assert_json_snapshot!( 166 | res.as_json(), @r###"[ 167 | { 168 | "Heros": [ 169 | { 170 | "heroName": "Luke Skywalker" 171 | }, 172 | { 173 | "heroName": "Darth Vader" 174 | } 175 | ] 176 | }, 177 | [] 178 | ]"### 179 | ); 180 | } 181 | 182 | #[test] 183 | fn query_filter_double_nested_negative() { 184 | let (schema, pool) = get_example_schema(); 185 | let ctx = MyContext::new(pool.get().unwrap()); 186 | 187 | let res = execute_query( 188 | &schema, 189 | &ctx, 190 | r#" 191 | { 192 | Heros(filter: {home_world: {heros: {heroName: {not_eq: "Leia Organa"}}}}) { 193 | heroName 194 | } 195 | } 196 | "#, 197 | ); 198 | assert!(res.is_ok()); 199 | assert_json_snapshot!( 200 | res.as_json(), @r###"[ 201 | { 202 | "Heros": [ 203 | { 204 | "heroName": "Luke Skywalker" 205 | }, 206 | { 207 | "heroName": "Darth Vader" 208 | } 209 | ] 210 | }, 211 | [] 212 | ]"### 213 | ); 214 | } 215 | -------------------------------------------------------------------------------- /wundergraph/tests/simple.rs: -------------------------------------------------------------------------------- 1 | use crate::helper::*; 2 | use wundergraph_example::MyContext; 3 | 4 | #[test] 5 | fn simple_query_single_field() { 6 | let (schema, pool) = get_example_schema(); 7 | let ctx = MyContext::new(pool.get().unwrap()); 8 | 9 | let res = execute_query( 10 | &schema, 11 | &ctx, 12 | " 13 | { 14 | Heros { 15 | heroName 16 | } 17 | } 18 | ", 19 | ); 20 | assert!(res.is_ok()); 21 | assert_json_snapshot!( 22 | res.as_json(), @r###"[ 23 | { 24 | "Heros": [ 25 | { 26 | "heroName": "Luke Skywalker" 27 | }, 28 | { 29 | "heroName": "Darth Vader" 30 | }, 31 | { 32 | "heroName": "Han Solo" 33 | }, 34 | { 35 | "heroName": "Leia Organa" 36 | }, 37 | { 38 | "heroName": "Wilhuff Tarkin" 39 | } 40 | ] 41 | }, 42 | [] 43 | ]"### 44 | ); 45 | } 46 | 47 | #[test] 48 | fn simple_query_multiple_field() { 49 | let (schema, pool) = get_example_schema(); 50 | let ctx = MyContext::new(pool.get().unwrap()); 51 | 52 | let res = execute_query( 53 | &schema, 54 | &ctx, 55 | " 56 | { 57 | Heros { 58 | id 59 | heroName 60 | } 61 | } 62 | ", 63 | ); 64 | assert!(res.is_ok()); 65 | assert_json_snapshot!( 66 | res.as_json(), @r###"[ 67 | { 68 | "Heros": [ 69 | { 70 | "heroName": "Luke Skywalker", 71 | "id": 1 72 | }, 73 | { 74 | "heroName": "Darth Vader", 75 | "id": 2 76 | }, 77 | { 78 | "heroName": "Han Solo", 79 | "id": 3 80 | }, 81 | { 82 | "heroName": "Leia Organa", 83 | "id": 4 84 | }, 85 | { 86 | "heroName": "Wilhuff Tarkin", 87 | "id": 5 88 | } 89 | ] 90 | }, 91 | [] 92 | ]"### 93 | ); 94 | } 95 | 96 | #[test] 97 | fn simple_query_nested() { 98 | let (schema, pool) = get_example_schema(); 99 | let ctx = MyContext::new(pool.get().unwrap()); 100 | 101 | let res = execute_query( 102 | &schema, 103 | &ctx, 104 | " 105 | { 106 | Heros { 107 | heroName 108 | home_world { 109 | name 110 | } 111 | } 112 | } 113 | ", 114 | ); 115 | assert!(res.is_ok()); 116 | assert_json_snapshot!( 117 | res.as_json(), @r###"[ 118 | { 119 | "Heros": [ 120 | { 121 | "heroName": "Luke Skywalker", 122 | "home_world": { 123 | "name": "Tatooine" 124 | } 125 | }, 126 | { 127 | "heroName": "Darth Vader", 128 | "home_world": { 129 | "name": "Tatooine" 130 | } 131 | }, 132 | { 133 | "heroName": "Han Solo", 134 | "home_world": null 135 | }, 136 | { 137 | "heroName": 
"Leia Organa", 138 | "home_world": { 139 | "name": "Alderaan" 140 | } 141 | }, 142 | { 143 | "heroName": "Wilhuff Tarkin", 144 | "home_world": null 145 | } 146 | ] 147 | }, 148 | [] 149 | ]"### 150 | ); 151 | } 152 | -------------------------------------------------------------------------------- /wundergraph/tests/type_checking.rs: -------------------------------------------------------------------------------- 1 | use crate::helper::*; 2 | use wundergraph_example::MyContext; 3 | 4 | #[test] 5 | fn test_type_decoration() { 6 | let (schema, pool) = get_example_schema(); 7 | let ctx = MyContext::new(pool.get().unwrap()); 8 | 9 | let res = execute_query( 10 | &schema, 11 | &ctx, 12 | r#" 13 | { 14 | __type(name: "Hero") { 15 | name 16 | description 17 | fields { 18 | name 19 | description 20 | isDeprecated 21 | deprecationReason 22 | } 23 | } 24 | } 25 | "#, 26 | ); 27 | 28 | assert!(res.is_ok()); 29 | assert_json_snapshot!( 30 | res.as_json(), @r###"[ 31 | { 32 | "__type": { 33 | "description": "A hero from Star Wars", 34 | "fields": [ 35 | { 36 | "deprecationReason": null, 37 | "description": "Internal id of a hero", 38 | "isDeprecated": false, 39 | "name": "id" 40 | }, 41 | { 42 | "deprecationReason": null, 43 | "description": "The name of a hero", 44 | "isDeprecated": false, 45 | "name": "heroName" 46 | }, 47 | { 48 | "deprecationReason": null, 49 | "description": "Which species a hero belongs to", 50 | "isDeprecated": false, 51 | "name": "species" 52 | }, 53 | { 54 | "deprecationReason": null, 55 | "description": "On which world a hero was born", 56 | "isDeprecated": false, 57 | "name": "home_world" 58 | }, 59 | { 60 | "deprecationReason": null, 61 | "description": "Episodes a hero appears in", 62 | "isDeprecated": false, 63 | "name": "appears_in" 64 | }, 65 | { 66 | "deprecationReason": null, 67 | "description": "List of friends of the current hero", 68 | "isDeprecated": false, 69 | "name": "friends" 70 | } 71 | ], 72 | "name": "Hero" 73 | } 74 | }, 75 | [] 76 | ]"### 77 | ); 78 | } 79 | -------------------------------------------------------------------------------- /wundergraph_bench/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "wundergraph_bench" 3 | version = "0.1.0" 4 | authors = ["Georg Semmler "] 5 | publish = false 6 | license = "MIT OR Apache-2.0" 7 | repository = "https://github.com/weiznich/wundergraph" 8 | readme = "../README.md" 9 | keywords = ["GraphQL", "ORM", "PostgreSQL", "SQLite"] 10 | categories = ["database", "web-programming"] 11 | description = "A GraphQL ORM build on top of diesel" 12 | edition = "2018" 13 | 14 | [dependencies] 15 | wundergraph = {path = "../wundergraph", default-features = false, features = ["chrono"]} 16 | diesel = {version = "1.4", features = ["r2d2", "chrono"]} 17 | juniper = "0.14.0" 18 | actix-web = "1.0.0" 19 | failure = "0.1" 20 | serde = {version = "1", features = ["derive"]} 21 | serde_json = "1" 22 | env_logger = "0.7" 23 | chrono = "0.4" 24 | num_cpus = "1.8" 25 | structopt = "0.3" 26 | 27 | [features] 28 | default = ["postgres"] 29 | postgres = ["wundergraph/postgres", "diesel/postgres"] 30 | sqlite = ["wundergraph/sqlite", "diesel/sqlite"] 31 | -------------------------------------------------------------------------------- /wundergraph_bench/migrations/pg/2018-09-28-104319_setup/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE film_categorys; 2 | DROP TABLE film_actors; 3 | DROP TABLE films; 4 | DROP 
TABLE actors; 5 | DROP TABLE categories; 6 | DROP TABLE playlist_tracks; 7 | DROP TABLE playlists; 8 | DROP TABLE invoice_lines; 9 | DROP TABLE invoices; 10 | DROP TABLE customers; 11 | DROP TABLE employees; 12 | DROP TABLE tracks; 13 | DROP TABLE genres; 14 | DROP TABLE albums; 15 | DROP TABLE artists; 16 | DROP TABLE media_types; 17 | 18 | DROP SEQUENCE album_album_id_seq; 19 | DROP SEQUENCE artist_artist_id_seq; 20 | DROP SEQUENCE customer_customer_id_seq; 21 | DROP SEQUENCE genre_genre_id_seq; 22 | DROP SEQUENCE employee_employee_id_seq; 23 | DROP SEQUENCE invoice_invoice_id_seq; 24 | DROP SEQUENCE invoiceline_invoiceline_id_seq; 25 | DROP SEQUENCE mediatype_mediatype_id_seq; 26 | DROP SEQUENCE playlist_playlist_id_seq; 27 | DROP SEQUENCE track_track_id_seq; 28 | DROP SEQUENCE actor_actor_id_seq; 29 | DROP SEQUENCE category_category_id_seq; 30 | DROP SEQUENCE film_film_id_seq; 31 | -------------------------------------------------------------------------------- /wundergraph_bench/migrations/sqlite/2018-09-28-104542_setup/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE TRACK; 2 | DROP TABLE PlaylistTrack; 3 | DROP TABLE Playlist; 4 | DROP TABLE MediaType; 5 | DROP TABLE InvoiceLine; 6 | DROP TABLE Invoice; 7 | DROP TABLE Genre; 8 | DROP TABLE Employee; 9 | DROP TABLE Customer; 10 | DROP TABLE Artist; 11 | DROP TABLE Album; 12 | -------------------------------------------------------------------------------- /wundergraph_bench/src/bin/wundergraph_bench.rs: -------------------------------------------------------------------------------- 1 | #![deny(missing_debug_implementations, missing_copy_implementations)] 2 | #![warn( 3 | clippy::print_stdout, 4 | clippy::wrong_pub_self_convention, 5 | clippy::mut_mut, 6 | clippy::non_ascii_literal, 7 | clippy::similar_names, 8 | clippy::unicode_not_nfc, 9 | clippy::enum_glob_use, 10 | clippy::if_not_else, 11 | clippy::items_after_statements, 12 | clippy::used_underscore_binding, 13 | clippy::cargo_common_metadata, 14 | clippy::dbg_macro, 15 | clippy::doc_markdown, 16 | clippy::filter_map, 17 | clippy::map_flatten, 18 | clippy::match_same_arms, 19 | clippy::needless_borrow, 20 | clippy::option_map_unwrap_or, 21 | clippy::option_map_unwrap_or_else, 22 | clippy::redundant_clone, 23 | clippy::result_map_unwrap_or_else, 24 | clippy::unnecessary_unwrap, 25 | clippy::unseparated_literal_suffix, 26 | clippy::wildcard_dependencies 27 | )] 28 | 29 | use actix_web::web::{Data, Json}; 30 | use actix_web::{middleware, web, App, HttpResponse, HttpServer}; 31 | use diesel::r2d2::{ConnectionManager, Pool}; 32 | use failure::Error; 33 | use juniper::graphiql::graphiql_source; 34 | use juniper::http::GraphQLRequest; 35 | use serde::{Deserialize, Serialize}; 36 | use std::sync::Arc; 37 | use structopt::StructOpt; 38 | use wundergraph::scalar::WundergraphScalarValue; 39 | 40 | #[derive(Debug, StructOpt)] 41 | #[structopt(name = "wundergraph_bench")] 42 | struct Opt { 43 | #[structopt(short = "u", long = "db-url")] 44 | database_url: String, 45 | #[structopt(short = "s", long = "socket", default_value = "127.0.0.1:8000")] 46 | socket: String, 47 | } 48 | 49 | // actix integration stuff 50 | #[derive(Serialize, Deserialize, Debug)] 51 | pub struct GraphQLData(GraphQLRequest); 52 | 53 | #[derive(Clone)] 54 | struct AppState { 55 | schema: Arc>, 56 | pool: Arc>>, 57 | } 58 | 59 | fn graphiql() -> Result { 60 | let html = graphiql_source("/graphql"); 61 | Ok(HttpResponse::Ok() 62 | .content_type("text/html; 
charset=utf-8") 63 | .body(html)) 64 | } 65 | 66 | fn graphql( 67 | Json(GraphQLData(data)): Json, 68 | st: Data, 69 | ) -> Result { 70 | let ctx = st.get_ref().pool.get()?; 71 | let res = data.execute(&st.get_ref().schema, &ctx); 72 | Ok(HttpResponse::Ok() 73 | .content_type("application/json") 74 | .body(serde_json::to_string(&res)?)) 75 | } 76 | 77 | #[cfg(feature = "postgres")] 78 | type DbConnection = diesel::pg::PgConnection; 79 | 80 | #[cfg(feature = "sqlite")] 81 | type DbConnection = diesel::sqlite::SqliteConnection; 82 | 83 | #[allow(clippy::print_stdout)] 84 | fn main() { 85 | let opt = Opt::from_args(); 86 | let manager = ConnectionManager::::new(opt.database_url); 87 | let pool = Pool::builder() 88 | .max_size((num_cpus::get() * 2 * 4) as u32) 89 | .build(manager) 90 | .expect("Failed to init pool"); 91 | 92 | let query = wundergraph_bench::api::Query::default(); 93 | let mutation = wundergraph_bench::api::Mutation::default(); 94 | let schema = wundergraph_bench::Schema::new(query, mutation); 95 | 96 | let schema = Arc::new(schema); 97 | let pool = Arc::new(pool); 98 | let data = AppState { schema, pool }; 99 | let url = opt.socket; 100 | 101 | // Start http server 102 | println!("Started http server: http://{}", url); 103 | 104 | HttpServer::new(move || { 105 | App::new() 106 | .data(data.clone()) 107 | .wrap(middleware::Logger::default()) 108 | .route("/graphql", web::get().to(graphql)) 109 | .route("/graphql", web::post().to(graphql)) 110 | .route("/graphiql", web::get().to(graphiql)) 111 | .default_service(web::route().to(|| { 112 | HttpResponse::Found() 113 | .header("location", "/graphiql") 114 | .finish() 115 | })) 116 | }) 117 | .bind(&url) 118 | .expect("Failed to start server") 119 | .run() 120 | .unwrap(); 121 | } 122 | -------------------------------------------------------------------------------- /wundergraph_bench/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![deny(missing_debug_implementations, missing_copy_implementations)] 2 | #![warn( 3 | clippy::print_stdout, 4 | clippy::wrong_pub_self_convention, 5 | clippy::mut_mut, 6 | clippy::non_ascii_literal, 7 | clippy::similar_names, 8 | clippy::unicode_not_nfc, 9 | clippy::enum_glob_use, 10 | clippy::if_not_else, 11 | clippy::items_after_statements, 12 | clippy::used_underscore_binding, 13 | clippy::cargo_common_metadata, 14 | clippy::dbg_macro, 15 | clippy::doc_markdown, 16 | clippy::filter_map, 17 | clippy::map_flatten, 18 | clippy::match_same_arms, 19 | clippy::needless_borrow, 20 | clippy::option_map_unwrap_or, 21 | clippy::option_map_unwrap_or_else, 22 | clippy::redundant_clone, 23 | clippy::result_map_unwrap_or_else, 24 | clippy::unnecessary_unwrap, 25 | clippy::unseparated_literal_suffix, 26 | clippy::wildcard_dependencies 27 | )] 28 | 29 | #[macro_use] 30 | extern crate diesel; 31 | #[macro_use] 32 | extern crate juniper; 33 | #[macro_use] 34 | extern crate wundergraph; 35 | 36 | use diesel::r2d2::{ConnectionManager, PooledConnection}; 37 | 38 | use wundergraph::scalar::WundergraphScalarValue; 39 | 40 | pub mod api; 41 | 42 | pub type Schema = juniper::RootNode< 43 | 'static, 44 | self::api::Query>>, 45 | self::api::Mutation>>, 46 | WundergraphScalarValue, 47 | >; 48 | -------------------------------------------------------------------------------- /wundergraph_cli/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors = ["Georg Semmler "] 3 | name = "wundergraph_cli" 4 | version = "0.1.1" 5 
| license = "MIT OR Apache-2.0" 6 | repository = "https://github.com/weiznich/wundergraph" 7 | readme = "../README.md" 8 | keywords = ["GraphQL", "ORM", "PostgreSQL", "SQLite"] 9 | categories = ["database", "web-programming"] 10 | description = "A helper tool to generate some code for using wundergraph with existing databases" 11 | edition = "2018" 12 | 13 | [dependencies] 14 | structopt = "0.3" 15 | clap = "2.27" 16 | diesel = "1.4" 17 | 18 | [dev-dependencies] 19 | dotenv = "0.15" 20 | insta = { version = "0.16", features = ["backtrace"] } 21 | tempdir = "0.3" 22 | reqwest = "0.9" 23 | serde_json = "1" 24 | 25 | [features] 26 | default = ["postgres", "sqlite"] 27 | sqlite = ["diesel/sqlite"] 28 | postgres = ["diesel/postgres"] 29 | -------------------------------------------------------------------------------- /wundergraph_cli/src/database.rs: -------------------------------------------------------------------------------- 1 | use diesel::*; 2 | 3 | use std::error::Error; 4 | 5 | enum Backend { 6 | #[cfg(feature = "postgres")] 7 | Pg, 8 | #[cfg(feature = "sqlite")] 9 | Sqlite, 10 | #[cfg(feature = "mysql")] 11 | Mysql, 12 | } 13 | 14 | impl Backend { 15 | fn for_url(database_url: &str) -> Self { 16 | match database_url { 17 | #[cfg(feature = "postgres")] 18 | _ if database_url.starts_with("postgres://") 19 | || database_url.starts_with("postgresql://") => 20 | { 21 | Backend::Pg 22 | } 23 | #[cfg(feature = "mysql")] 24 | _ if database_url.starts_with("mysql://") => 25 | { 26 | Backend::Mysql 27 | } 28 | #[cfg(feature = "sqlite")] 29 | _ => Backend::Sqlite, 30 | #[cfg(not(feature = "sqlite"))] 31 | _ => { 32 | let mut available_schemes: Vec<&str> = Vec::new(); 33 | 34 | // One of these will always be true, or you are compiling 35 | // diesel_cli without a backend. And why would you ever want to 36 | // do that? 37 | if cfg!(feature = "postgres") { 38 | available_schemes.push("`postgres://`"); 39 | } 40 | if cfg!(feature = "mysql") { 41 | available_schemes.push("`mysql://`"); 42 | } 43 | 44 | panic!( 45 | "`{}` is not a valid database URL. It should start with {}", 46 | database_url, 47 | available_schemes.join(" or ") 48 | ); 49 | } 50 | #[cfg(not(any(feature = "mysql", feature = "sqlite", feature = "postgres")))] 51 | _ => compile_error!( 52 | "At least one backend must be specified for use with this crate. \ 53 | You may omit the unneeded dependencies in the following command. \n\n \ 54 | ex. 
`cargo install diesel_cli --no-default-features --features mysql postgres sqlite` \n" 55 | ), 56 | } 57 | } 58 | } 59 | 60 | pub enum InferConnection { 61 | #[cfg(feature = "postgres")] 62 | Pg(PgConnection), 63 | #[cfg(feature = "sqlite")] 64 | Sqlite(SqliteConnection), 65 | #[cfg(feature = "mysql")] 66 | Mysql(MysqlConnection), 67 | } 68 | 69 | impl InferConnection { 70 | pub fn establish(database_url: &str) -> Result> { 71 | match Backend::for_url(database_url) { 72 | #[cfg(feature = "postgres")] 73 | Backend::Pg => PgConnection::establish(database_url).map(InferConnection::Pg), 74 | #[cfg(feature = "sqlite")] 75 | Backend::Sqlite => { 76 | SqliteConnection::establish(database_url).map(InferConnection::Sqlite) 77 | } 78 | #[cfg(feature = "mysql")] 79 | Backend::Mysql => MysqlConnection::establish(database_url).map(InferConnection::Mysql), 80 | } 81 | .map_err(Into::into) 82 | } 83 | } 84 | 85 | /* 86 | #[cfg(all(test, any(feature = "postgres", feature = "mysql")))] 87 | mod tests { 88 | use super::change_database_of_url; 89 | 90 | #[test] 91 | fn split_pg_connection_string_returns_postgres_url_and_database() { 92 | let database = "database".to_owned(); 93 | let base_url = "postgresql://localhost:5432".to_owned(); 94 | let database_url = format!("{}/{}", base_url, database); 95 | let postgres_url = format!("{}/{}", base_url, "postgres"); 96 | assert_eq!( 97 | (database, postgres_url), 98 | change_database_of_url(&database_url, "postgres") 99 | ); 100 | } 101 | 102 | #[test] 103 | fn split_pg_connection_string_handles_user_and_password() { 104 | let database = "database".to_owned(); 105 | let base_url = "postgresql://user:password@localhost:5432".to_owned(); 106 | let database_url = format!("{}/{}", base_url, database); 107 | let postgres_url = format!("{}/{}", base_url, "postgres"); 108 | assert_eq!( 109 | (database, postgres_url), 110 | change_database_of_url(&database_url, "postgres") 111 | ); 112 | } 113 | 114 | #[test] 115 | fn split_pg_connection_string_handles_query_string() { 116 | let database = "database".to_owned(); 117 | let query = "?sslmode=true".to_owned(); 118 | let base_url = "postgresql://user:password@localhost:5432".to_owned(); 119 | let database_url = format!("{}/{}{}", base_url, database, query); 120 | let postgres_url = format!("{}/{}{}", base_url, "postgres", query); 121 | assert_eq!( 122 | (database, postgres_url), 123 | change_database_of_url(&database_url, "postgres") 124 | ); 125 | } 126 | } 127 | */ 128 | -------------------------------------------------------------------------------- /wundergraph_cli/src/infer_schema_internals/data_structures.rs: -------------------------------------------------------------------------------- 1 | #[cfg(any(feature = "postgres", feature = "mysql"))] 2 | use diesel::backend::Backend; 3 | use diesel::deserialize::FromSqlRow; 4 | #[cfg(feature = "sqlite")] 5 | use diesel::sqlite::Sqlite; 6 | use diesel::*; 7 | 8 | #[cfg(any(feature = "postgres", feature = "mysql"))] 9 | use super::information_schema::UsesInformationSchema; 10 | use super::table_data::TableName; 11 | 12 | #[derive(Debug, Clone, PartialEq, Eq)] 13 | pub struct ColumnInformation { 14 | pub column_name: String, 15 | pub type_name: String, 16 | pub nullable: bool, 17 | pub has_default: bool, 18 | } 19 | 20 | #[derive(Debug)] 21 | pub struct ColumnType { 22 | pub rust_name: String, 23 | pub is_array: bool, 24 | pub is_nullable: bool, 25 | pub is_unsigned: bool, 26 | } 27 | 28 | use std::fmt; 29 | 30 | impl fmt::Display for ColumnType { 31 | fn fmt(&self, out: &mut 
fmt::Formatter<'_>) -> Result<(), fmt::Error> { 32 | if self.is_nullable { 33 | write!(out, "Nullable<")?; 34 | } 35 | if self.is_array { 36 | write!(out, "Array<")?; 37 | } 38 | if self.is_unsigned { 39 | write!(out, "Unsigned<")?; 40 | } 41 | write!(out, "{}", self.rust_name)?; 42 | if self.is_unsigned { 43 | write!(out, ">")?; 44 | } 45 | if self.is_array { 46 | write!(out, ">")?; 47 | } 48 | if self.is_nullable { 49 | write!(out, ">")?; 50 | } 51 | Ok(()) 52 | } 53 | } 54 | 55 | #[derive(Debug)] 56 | pub struct ColumnDefinition { 57 | pub sql_name: String, 58 | pub ty: ColumnType, 59 | pub docs: String, 60 | pub rust_name: Option, 61 | pub has_default: bool, 62 | } 63 | 64 | impl ColumnInformation { 65 | pub fn new(column_name: T, type_name: U, nullable: bool, has_default: bool) -> Self 66 | where 67 | T: Into, 68 | U: Into, 69 | { 70 | Self { 71 | column_name: column_name.into(), 72 | type_name: type_name.into(), 73 | nullable, 74 | has_default, 75 | } 76 | } 77 | } 78 | 79 | #[cfg(any(feature = "postgres", feature = "mysql"))] 80 | impl Queryable for ColumnInformation 81 | where 82 | DB: Backend + UsesInformationSchema, 83 | (String, String, String, Option): FromSqlRow, 84 | { 85 | type Row = (String, String, String, Option); 86 | 87 | fn build(row: Self::Row) -> Self { 88 | Self::new(row.0, row.1, row.2 == "YES", row.3.is_some()) 89 | } 90 | } 91 | 92 | #[cfg(feature = "sqlite")] 93 | impl Queryable for ColumnInformation 94 | where 95 | (i32, String, String, bool, Option, bool): FromSqlRow, 96 | { 97 | type Row = (i32, String, String, bool, Option, bool); 98 | 99 | fn build(row: Self::Row) -> Self { 100 | Self::new(row.1, row.2, !row.3, row.4.is_some()) 101 | } 102 | } 103 | 104 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] 105 | pub struct ForeignKeyConstraint { 106 | pub child_table: TableName, 107 | pub parent_table: TableName, 108 | pub foreign_key: String, 109 | pub primary_key: String, 110 | } 111 | 112 | impl ForeignKeyConstraint { 113 | pub fn ordered_tables(&self) -> (&TableName, &TableName) { 114 | use std::cmp::{max, min}; 115 | ( 116 | min(&self.parent_table, &self.child_table), 117 | max(&self.parent_table, &self.child_table), 118 | ) 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /wundergraph_cli/src/infer_schema_internals/foreign_keys.rs: -------------------------------------------------------------------------------- 1 | use super::data_structures::ForeignKeyConstraint; 2 | use super::inference::get_primary_keys; 3 | use super::table_data::TableName; 4 | use crate::database::InferConnection; 5 | 6 | pub fn remove_unsafe_foreign_keys_for_codegen( 7 | connection: &InferConnection, 8 | foreign_keys: &[ForeignKeyConstraint], 9 | safe_tables: &[TableName], 10 | ) -> Vec { 11 | let duplicates = foreign_keys 12 | .iter() 13 | .map(ForeignKeyConstraint::ordered_tables) 14 | .filter(|tables| { 15 | let dup_count = foreign_keys 16 | .iter() 17 | .filter(|fk| tables == &fk.ordered_tables()) 18 | .count(); 19 | dup_count > 1 20 | }) 21 | .collect::>(); 22 | 23 | foreign_keys 24 | .iter() 25 | .filter(|fk| fk.parent_table != fk.child_table) 26 | .filter(|fk| safe_tables.contains(&fk.parent_table)) 27 | .filter(|fk| safe_tables.contains(&fk.child_table)) 28 | .filter(|fk| { 29 | let pk_columns = get_primary_keys(connection, &fk.parent_table) 30 | .unwrap_or_else(|_| panic!("Error loading primary keys for `{}`", fk.parent_table)); 31 | pk_columns.len() == 1 && pk_columns[0] == fk.primary_key 32 | }) 33 | 
.filter(|fk| !duplicates.contains(&fk.ordered_tables())) 34 | .cloned() 35 | .collect() 36 | } 37 | -------------------------------------------------------------------------------- /wundergraph_cli/src/infer_schema_internals/mod.rs: -------------------------------------------------------------------------------- 1 | mod data_structures; 2 | mod foreign_keys; 3 | mod inference; 4 | mod table_data; 5 | 6 | #[cfg(any(feature = "mysql", feature = "postgres"))] 7 | mod information_schema; 8 | #[cfg(feature = "mysql")] 9 | mod mysql; 10 | #[cfg(feature = "postgres")] 11 | mod pg; 12 | #[cfg(feature = "sqlite")] 13 | mod sqlite; 14 | 15 | pub use self::data_structures::*; 16 | pub use self::foreign_keys::*; 17 | pub use self::inference::*; 18 | pub use self::table_data::*; 19 | -------------------------------------------------------------------------------- /wundergraph_cli/src/infer_schema_internals/pg.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | use std::io::{stderr, Write}; 3 | 4 | use super::data_structures::*; 5 | 6 | pub fn determine_column_type(attr: &ColumnInformation) -> Result> { 7 | let is_array = attr.type_name.starts_with('_'); 8 | let tpe = if is_array { 9 | &attr.type_name[1..] 10 | } else { 11 | &attr.type_name 12 | }; 13 | 14 | let diesel_alias_without_postgres_coercion = match &*tpe.to_lowercase() { 15 | "varchar" | "citext" => Some(tpe), 16 | _ => None, 17 | }; 18 | 19 | // Postgres doesn't coerce varchar[] to text[] so print out a message to inform 20 | // the user. 21 | if let (true, Some(tpe)) = (is_array, diesel_alias_without_postgres_coercion) { 22 | writeln!( 23 | &mut stderr(), 24 | "The column `{}` is of type `{}[]`. This will cause problems when using Diesel. You should consider changing the column type to `text[]`.", 25 | attr.column_name, 26 | tpe 27 | )?; 28 | } 29 | 30 | Ok(ColumnType { 31 | rust_name: capitalize(tpe), 32 | is_array, 33 | is_nullable: attr.nullable, 34 | is_unsigned: false, 35 | }) 36 | } 37 | 38 | fn capitalize(name: &str) -> String { 39 | name[..1].to_uppercase() + &name[1..] 
40 | } 41 | -------------------------------------------------------------------------------- /wundergraph_cli/src/infer_schema_internals/table_data.rs: -------------------------------------------------------------------------------- 1 | use diesel::backend::Backend; 2 | use diesel::deserialize::FromSqlRow; 3 | use diesel::*; 4 | use std::fmt; 5 | use std::str::FromStr; 6 | 7 | use super::data_structures::ColumnDefinition; 8 | 9 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] 10 | pub struct TableName { 11 | pub name: String, 12 | pub schema: Option, 13 | } 14 | 15 | impl TableName { 16 | pub fn from_name>(name: T) -> Self { 17 | Self { 18 | name: name.into(), 19 | schema: None, 20 | } 21 | } 22 | 23 | pub fn new(name: T, schema: U) -> Self 24 | where 25 | T: Into, 26 | U: Into, 27 | { 28 | Self { 29 | name: name.into(), 30 | schema: Some(schema.into()), 31 | } 32 | } 33 | 34 | #[cfg(any(feature = "postgres", feature = "mysql"))] 35 | pub fn strip_schema_if_matches(&mut self, schema: &str) { 36 | if self.schema.as_ref().map(|s| &**s) == Some(schema) { 37 | self.schema = None; 38 | } 39 | } 40 | } 41 | 42 | impl Queryable for TableName 43 | where 44 | DB: Backend, 45 | (String, String): FromSqlRow, 46 | { 47 | type Row = (String, String); 48 | 49 | fn build((name, schema): Self::Row) -> Self { 50 | Self::new(name, schema) 51 | } 52 | } 53 | 54 | impl fmt::Display for TableName { 55 | fn fmt(&self, out: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { 56 | match self.schema { 57 | Some(ref schema_name) => write!(out, "{}.{}", schema_name, self.name), 58 | None => write!(out, "{}", self.name), 59 | } 60 | } 61 | } 62 | 63 | #[derive(Debug)] 64 | #[allow(missing_copy_implementations)] 65 | pub enum Never {} 66 | 67 | impl FromStr for TableName { 68 | type Err = Never; 69 | 70 | fn from_str(table_name: &str) -> Result { 71 | let mut parts = table_name.split('.'); 72 | match (parts.next(), parts.next()) { 73 | (Some(schema), Some(name)) => Ok(Self::new(name, schema)), 74 | _ => Ok(Self::from_name(table_name)), 75 | } 76 | } 77 | } 78 | 79 | #[derive(Debug)] 80 | pub struct TableData { 81 | pub name: TableName, 82 | pub primary_key: Vec, 83 | pub column_data: Vec, 84 | pub docs: String, 85 | } 86 | -------------------------------------------------------------------------------- /wundergraph_cli/src/main.rs: -------------------------------------------------------------------------------- 1 | #![deny(missing_debug_implementations, missing_copy_implementations)] 2 | #![warn( 3 | clippy::option_unwrap_used, 4 | clippy::result_unwrap_used, 5 | clippy::wrong_pub_self_convention, 6 | clippy::mut_mut, 7 | clippy::non_ascii_literal, 8 | clippy::similar_names, 9 | clippy::unicode_not_nfc, 10 | clippy::enum_glob_use, 11 | clippy::if_not_else, 12 | clippy::items_after_statements, 13 | clippy::used_underscore_binding, 14 | clippy::cargo_common_metadata, 15 | clippy::dbg_macro, 16 | clippy::doc_markdown, 17 | clippy::filter_map, 18 | clippy::map_flatten, 19 | clippy::match_same_arms, 20 | clippy::needless_borrow, 21 | clippy::needless_pass_by_value, 22 | clippy::option_map_unwrap_or, 23 | clippy::option_map_unwrap_or_else, 24 | clippy::redundant_clone, 25 | clippy::result_map_unwrap_or_else, 26 | clippy::unnecessary_unwrap, 27 | clippy::unseparated_literal_suffix, 28 | clippy::wildcard_dependencies 29 | )] 30 | 31 | #[macro_use] 32 | extern crate diesel; 33 | 34 | use structopt::StructOpt; 35 | 36 | mod database; 37 | mod infer_schema_internals; 38 | mod print_schema; 39 | 40 | use 
crate::database::InferConnection; 41 | 42 | #[derive(StructOpt, Debug)] 43 | #[structopt(name = "wundergraph")] 44 | #[allow(clippy::result_unwrap_used)] 45 | enum Wundergraph { 46 | #[structopt(name = "print-schema")] 47 | PrintSchema { 48 | database_url: String, 49 | schema: Option, 50 | }, 51 | } 52 | 53 | fn main() { 54 | match Wundergraph::from_args() { 55 | Wundergraph::PrintSchema { 56 | database_url, 57 | schema, 58 | } => { 59 | let conn = InferConnection::establish(&database_url).unwrap_or_else(|_| { 60 | panic!("Unable to connect to database with url: {}", database_url) 61 | }); 62 | print_schema::print( 63 | &conn, 64 | schema.as_ref().map(|s| s as &str), 65 | &mut std::io::stdout(), 66 | ) 67 | } 68 | .expect("Failed to infer the schema"), 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /wundergraph_cli/src/print_schema/snapshots/wundergraph_cli__print_schema__tests__infer_schema@postgres.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: wundergraph_cli/src/print_schema/mod.rs 3 | expression: "&s" 4 | --- 5 | use wundergraph::query_builder::types::{HasMany, HasOne}; 6 | use wundergraph::scalar::WundergraphScalarValue; 7 | use wundergraph::WundergraphEntity; 8 | 9 | table! { 10 | infer_test.comments (id) { 11 | id -> Int4, 12 | post -> Nullable, 13 | commenter -> Nullable, 14 | content -> Text, 15 | } 16 | } 17 | 18 | table! { 19 | infer_test.posts (id) { 20 | id -> Int4, 21 | author -> Nullable, 22 | title -> Text, 23 | datetime -> Nullable, 24 | content -> Nullable, 25 | } 26 | } 27 | 28 | table! { 29 | infer_test.users (id) { 30 | id -> Int4, 31 | name -> Text, 32 | } 33 | } 34 | 35 | allow_tables_to_appear_in_same_query!( 36 | comments, 37 | posts, 38 | users, 39 | ); 40 | 41 | 42 | #[derive(Clone, Debug, Identifiable, WundergraphEntity)] 43 | #[table_name = "comments"] 44 | #[primary_key(id)] 45 | pub struct Comment { 46 | id: i32, 47 | post: Option>, 48 | commenter: Option>, 49 | content: String, 50 | } 51 | 52 | #[derive(Clone, Debug, Identifiable, WundergraphEntity)] 53 | #[table_name = "posts"] 54 | #[primary_key(id)] 55 | pub struct Post { 56 | id: i32, 57 | author: Option>, 58 | title: String, 59 | datetime: Option, 60 | content: Option, 61 | comments: HasMany, 62 | } 63 | 64 | #[derive(Clone, Debug, Identifiable, WundergraphEntity)] 65 | #[table_name = "users"] 66 | #[primary_key(id)] 67 | pub struct User { 68 | id: i32, 69 | name: String, 70 | comments: HasMany, 71 | posts: HasMany, 72 | } 73 | 74 | 75 | 76 | wundergraph::query_object!{ 77 | Query { 78 | Comment, 79 | Post, 80 | User, 81 | } 82 | } 83 | 84 | 85 | #[derive(Insertable, juniper::GraphQLInputObject, Clone, Debug)] 86 | #[graphql(scalar = "WundergraphScalarValue")] 87 | #[table_name = "comments"] 88 | pub struct NewComment { 89 | post: Option, 90 | commenter: Option, 91 | content: String, 92 | } 93 | 94 | #[derive(AsChangeset, Identifiable, juniper::GraphQLInputObject, Clone, Debug)] 95 | #[graphql(scalar = "WundergraphScalarValue")] 96 | #[table_name = "comments"] 97 | #[primary_key(id)] 98 | pub struct CommentChangeset { 99 | id: i32, 100 | post: Option, 101 | commenter: Option, 102 | content: String, 103 | } 104 | 105 | #[derive(Insertable, juniper::GraphQLInputObject, Clone, Debug)] 106 | #[graphql(scalar = "WundergraphScalarValue")] 107 | #[table_name = "posts"] 108 | pub struct NewPost { 109 | author: Option, 110 | title: String, 111 | datetime: Option, 112 | content: Option, 113 | } 114 | 
115 | #[derive(AsChangeset, Identifiable, juniper::GraphQLInputObject, Clone, Debug)] 116 | #[graphql(scalar = "WundergraphScalarValue")] 117 | #[table_name = "posts"] 118 | #[primary_key(id)] 119 | pub struct PostChangeset { 120 | id: i32, 121 | author: Option, 122 | title: String, 123 | datetime: Option, 124 | content: Option, 125 | } 126 | 127 | #[derive(Insertable, juniper::GraphQLInputObject, Clone, Debug)] 128 | #[graphql(scalar = "WundergraphScalarValue")] 129 | #[table_name = "users"] 130 | pub struct NewUser { 131 | name: String, 132 | } 133 | 134 | #[derive(AsChangeset, Identifiable, juniper::GraphQLInputObject, Clone, Debug)] 135 | #[graphql(scalar = "WundergraphScalarValue")] 136 | #[table_name = "users"] 137 | #[primary_key(id)] 138 | pub struct UserChangeset { 139 | id: i32, 140 | name: String, 141 | } 142 | 143 | wundergraph::mutation_object!{ 144 | Mutation{ 145 | Comment(insert = NewComment, update = CommentChangeset, ), 146 | Post(insert = NewPost, update = PostChangeset, ), 147 | User(insert = NewUser, update = UserChangeset, ), 148 | } 149 | } 150 | 151 | 152 | -------------------------------------------------------------------------------- /wundergraph_cli/src/print_schema/snapshots/wundergraph_cli__print_schema__tests__infer_schema@sqlite.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: wundergraph_cli/src/print_schema/mod.rs 3 | expression: "&s" 4 | --- 5 | use wundergraph::query_builder::types::{HasMany, HasOne}; 6 | use wundergraph::scalar::WundergraphScalarValue; 7 | use wundergraph::WundergraphEntity; 8 | 9 | table! { 10 | comments (id) { 11 | id -> Integer, 12 | post -> Nullable, 13 | commenter -> Nullable, 14 | content -> Text, 15 | } 16 | } 17 | 18 | table! { 19 | posts (id) { 20 | id -> Integer, 21 | author -> Nullable, 22 | title -> Text, 23 | datetime -> Nullable, 24 | content -> Nullable, 25 | } 26 | } 27 | 28 | table! 
{ 29 | users (id) { 30 | id -> Integer, 31 | name -> Text, 32 | } 33 | } 34 | 35 | allow_tables_to_appear_in_same_query!( 36 | comments, 37 | posts, 38 | users, 39 | ); 40 | 41 | 42 | #[derive(Clone, Debug, Identifiable, WundergraphEntity)] 43 | #[table_name = "comments"] 44 | #[primary_key(id)] 45 | pub struct Comment { 46 | id: i32, 47 | post: Option>, 48 | commenter: Option>, 49 | content: String, 50 | } 51 | 52 | #[derive(Clone, Debug, Identifiable, WundergraphEntity)] 53 | #[table_name = "posts"] 54 | #[primary_key(id)] 55 | pub struct Post { 56 | id: i32, 57 | author: Option>, 58 | title: String, 59 | datetime: Option, 60 | content: Option, 61 | comments: HasMany, 62 | } 63 | 64 | #[derive(Clone, Debug, Identifiable, WundergraphEntity)] 65 | #[table_name = "users"] 66 | #[primary_key(id)] 67 | pub struct User { 68 | id: i32, 69 | name: String, 70 | comments: HasMany, 71 | posts: HasMany, 72 | } 73 | 74 | 75 | 76 | wundergraph::query_object!{ 77 | Query { 78 | Comment, 79 | Post, 80 | User, 81 | } 82 | } 83 | 84 | 85 | #[derive(Insertable, juniper::GraphQLInputObject, Clone, Debug)] 86 | #[graphql(scalar = "WundergraphScalarValue")] 87 | #[table_name = "comments"] 88 | pub struct NewComment { 89 | post: Option, 90 | commenter: Option, 91 | content: String, 92 | } 93 | 94 | #[derive(AsChangeset, Identifiable, juniper::GraphQLInputObject, Clone, Debug)] 95 | #[graphql(scalar = "WundergraphScalarValue")] 96 | #[table_name = "comments"] 97 | #[primary_key(id)] 98 | pub struct CommentChangeset { 99 | id: i32, 100 | post: Option, 101 | commenter: Option, 102 | content: String, 103 | } 104 | 105 | #[derive(Insertable, juniper::GraphQLInputObject, Clone, Debug)] 106 | #[graphql(scalar = "WundergraphScalarValue")] 107 | #[table_name = "posts"] 108 | pub struct NewPost { 109 | author: Option, 110 | title: String, 111 | datetime: Option, 112 | content: Option, 113 | } 114 | 115 | #[derive(AsChangeset, Identifiable, juniper::GraphQLInputObject, Clone, Debug)] 116 | #[graphql(scalar = "WundergraphScalarValue")] 117 | #[table_name = "posts"] 118 | #[primary_key(id)] 119 | pub struct PostChangeset { 120 | id: i32, 121 | author: Option, 122 | title: String, 123 | datetime: Option, 124 | content: Option, 125 | } 126 | 127 | #[derive(Insertable, juniper::GraphQLInputObject, Clone, Debug)] 128 | #[graphql(scalar = "WundergraphScalarValue")] 129 | #[table_name = "users"] 130 | pub struct NewUser { 131 | name: String, 132 | } 133 | 134 | #[derive(AsChangeset, Identifiable, juniper::GraphQLInputObject, Clone, Debug)] 135 | #[graphql(scalar = "WundergraphScalarValue")] 136 | #[table_name = "users"] 137 | #[primary_key(id)] 138 | pub struct UserChangeset { 139 | id: i32, 140 | name: String, 141 | } 142 | 143 | wundergraph::mutation_object!{ 144 | Mutation{ 145 | Comment(insert = NewComment, update = CommentChangeset, ), 146 | Post(insert = NewPost, update = PostChangeset, ), 147 | User(insert = NewUser, update = UserChangeset, ), 148 | } 149 | } 150 | 151 | 152 | -------------------------------------------------------------------------------- /wundergraph_cli/src/print_schema/snapshots/wundergraph_cli__print_schema__tests__round_trip_test__mutation.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: wundergraph_cli/src/print_schema/mod.rs 3 | expression: r 4 | --- 5 | { 6 | "data": { 7 | "CreateUser": { 8 | "id": 1, 9 | "name": "Max" 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- 
/wundergraph_cli/src/print_schema/snapshots/wundergraph_cli__print_schema__tests__round_trip_test__query_1.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: wundergraph_cli/src/print_schema/mod.rs 3 | expression: r 4 | --- 5 | { 6 | "data": { 7 | "Users": [] 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /wundergraph_cli/src/print_schema/snapshots/wundergraph_cli__print_schema__tests__round_trip_test__query_2.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: wundergraph_cli/src/print_schema/mod.rs 3 | expression: r 4 | --- 5 | { 6 | "data": { 7 | "Users": [ 8 | { 9 | "id": 1, 10 | "name": "Max" 11 | } 12 | ] 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /wundergraph_cli/src/print_schema/template_main.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] extern crate diesel; 2 | 3 | use actix_web::web::Data; 4 | use actix_web::web::Json; 5 | use actix_web::middleware; 6 | use actix_web::web; 7 | use actix_web::App; 8 | use actix_web::HttpResponse; 9 | use actix_web::HttpServer; 10 | use diesel::{conn}; 11 | use diesel::r2d2::ConnectionManager; 12 | use diesel::r2d2::Pool; 13 | use diesel::r2d2::PooledConnection; 14 | use diesel::r2d2::CustomizeConnection; 15 | use diesel::Connection; 16 | use diesel::connection::SimpleConnection; 17 | use juniper::graphiql::graphiql_source; 18 | use juniper::http::GraphQLRequest; 19 | use serde::Deserialize; 20 | use serde::Serialize; 21 | use std::sync::Arc; 22 | use wundergraph::scalar::WundergraphScalarValue; 23 | 24 | pub mod api; 25 | 26 | pub type Schema = juniper::RootNode< 27 | 'static, 28 | self::api::Query>>, 29 | self::api::Mutation>>, 30 | WundergraphScalarValue, 31 | >; 32 | 33 | // actix integration stuff 34 | #[derive(Serialize, Deserialize, Debug)] 35 | pub struct GraphQLData(GraphQLRequest); 36 | 37 | #[derive(Clone)] 38 | struct AppState {{ 39 | schema: Arc>, 40 | pool: Arc>>, 41 | }} 42 | 43 | fn graphiql() -> HttpResponse {{ 44 | let html = graphiql_source("/graphql"); 45 | HttpResponse::Ok() 46 | .content_type("text/html; charset=utf-8") 47 | .body(html) 48 | }} 49 | 50 | fn graphql( 51 | Json(GraphQLData(data)): Json, 52 | st: Data, 53 | ) -> Result {{ 54 | let ctx = st.get_ref().pool.get()?; 55 | let res = data.execute(&st.get_ref().schema, &ctx); 56 | Ok(HttpResponse::Ok() 57 | .content_type("application/json") 58 | .body(serde_json::to_string(&res)?)) 59 | }} 60 | 61 | #[derive(Debug)] 62 | struct ConnectionHandler; 63 | 64 | impl CustomizeConnection<{conn}, E> for ConnectionHandler {{ 65 | fn on_acquire(&self, conn: &mut {conn}) -> Result<(), E> {{ 66 | Ok(conn.begin_test_transaction().unwrap()) 67 | }} 68 | }} 69 | 70 | fn main() {{ 71 | let manager = ConnectionManager::<{conn}>::new("{db_url}"); 72 | let pool = Pool::builder() 73 | .max_size(1) 74 | .connection_customizer(Box::new(ConnectionHandler)) 75 | .build(manager) 76 | .expect("Failed to init pool"); 77 | {{ 78 | let conn = pool.get().unwrap(); 79 | conn.batch_execute("{migrations}").unwrap(); 80 | }} 81 | 82 | let query = self::api::Query::default(); 83 | let mutation = self::api::Mutation::default(); 84 | let schema = Schema::new(query, mutation); 85 | 86 | let schema = Arc::new(schema); 87 | let pool = Arc::new(pool); 88 | let data = AppState {{ schema, pool }}; 89 | 90 | let url = "{listen_url}"; 91 | 92 | 
println!("Started http server: http://{{}}", url); 93 | 94 | HttpServer::new(move || {{ 95 | App::new() 96 | .data(data.clone()) 97 | .wrap(middleware::Logger::default()) 98 | .route("/graphql", web::get().to(graphql)) 99 | .route("/graphql", web::post().to(graphql)) 100 | .route("/graphiql", web::get().to(graphiql)) 101 | .default_service(web::route().to(|| {{ 102 | HttpResponse::Found() 103 | .header("location", "/graphiql") 104 | .finish() 105 | }})) 106 | }}) 107 | .bind(&url) 108 | .expect("Failed to start server") 109 | .run() 110 | .unwrap(); 111 | }} 112 | -------------------------------------------------------------------------------- /wundergraph_derive/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "wundergraph_derive" 3 | version = "0.1.0" 4 | authors = ["Georg Semmler "] 5 | license = "MIT OR Apache-2.0" 6 | repository = "https://github.com/weiznich/wundergraph" 7 | readme = "../README.md" 8 | keywords = ["GraphQL", "ORM", "PostgreSQL", "SQLite"] 9 | categories = ["database", "web-programming"] 10 | description = "Internal proc macro implementation for wundergraph" 11 | edition = "2018" 12 | 13 | [dependencies] 14 | syn = { version = "1", features = ["full", "fold", "extra-traits"] } 15 | quote = "1" 16 | proc-macro2 = "1" 17 | 18 | [lib] 19 | proc-macro = true 20 | 21 | [features] 22 | default = [] 23 | nightly = ["proc-macro2/nightly"] 24 | postgres = [] 25 | sqlite = [] 26 | debug = [] 27 | -------------------------------------------------------------------------------- /wundergraph_derive/src/build_filter_helper.rs: -------------------------------------------------------------------------------- 1 | use crate::diagnostic_shim::Diagnostic; 2 | use crate::model::Model; 3 | use crate::utils::wrap_in_dummy_mod; 4 | use proc_macro2::TokenStream; 5 | use quote::quote; 6 | use syn::parse_quote; 7 | 8 | pub fn derive(item: &syn::DeriveInput) -> Result { 9 | let model = Model::from_item(item)?; 10 | 11 | let pg = if cfg!(feature = "postgres") { 12 | Some(derive_non_table_filter( 13 | &model, 14 | item, 15 | "e!(diesel::pg::Pg), 16 | )?) 17 | } else { 18 | None 19 | }; 20 | 21 | let sqlite = if cfg!(feature = "sqlite") { 22 | Some(derive_non_table_filter( 23 | &model, 24 | item, 25 | "e!(diesel::sqlite::Sqlite), 26 | )?) 27 | } else { 28 | None 29 | }; 30 | 31 | Ok(wrap_in_dummy_mod( 32 | "build_filter_helper", 33 | &model.name, 34 | "e! { 35 | #pg 36 | #sqlite 37 | }, 38 | )) 39 | } 40 | 41 | pub fn derive_non_table_filter( 42 | model: &Model, 43 | item: &syn::DeriveInput, 44 | backend: &TokenStream, 45 | ) -> Result { 46 | let (_, ty_generics, _) = item.generics.split_for_impl(); 47 | let mut generics = item.generics.clone(); 48 | generics 49 | .params 50 | .push(parse_quote!(__Ctx: wundergraph::WundergraphContext + 'static)); 51 | { 52 | let where_clause = generics.where_clause.get_or_insert(parse_quote!(where)); 53 | where_clause 54 | .predicates 55 | .push(parse_quote!(<__Ctx as wundergraph::WundergraphContext>::Connection: wundergraph::diesel::Connection)); 56 | } 57 | 58 | let (impl_generics, _, where_clause) = generics.split_for_impl(); 59 | let table = model.table_type()?; 60 | let table = "e!(#table::table); 61 | let struct_type = &model.name; 62 | let filter = "e! { 63 | as wundergraph::query_builder::selection::filter::CreateFilter>::Filter 65 | }; 66 | 67 | Ok(quote! 
{ 68 | impl#impl_generics wundergraph::query_builder::selection::filter::BuildFilterHelper< 69 | #backend, 70 | #filter, 71 | __Ctx, 72 | > for #table 73 | #where_clause 74 | { 75 | type Ret = Box>; 76 | const FIELD_COUNT: usize = as wundergraph::query_builder::selection::filter::InnerFilter>::FIELD_COUNT; 81 | 82 | fn into_filter( 83 | f: #filter, 84 | ) -> std::option::Option { 85 | use wundergraph::query_builder::selection::filter::BuildFilter; 86 | BuildFilter::<#backend>::into_filter(f) 87 | } 88 | 89 | fn from_inner_look_ahead( 90 | objs: &[(&str, wundergraph::juniper::LookAheadValue)] 91 | ) -> #filter { 92 | use wundergraph::query_builder::selection::filter::InnerFilter; 93 | wundergraph::query_builder::selection::filter::FilterBuildHelper::<#filter, #struct_type #ty_generics, #backend, __Ctx>::from_inner_look_ahead(objs).0 94 | } 95 | 96 | fn from_inner_input_value( 97 | obj: wundergraph::indexmap::IndexMap<&str, &wundergraph::juniper::InputValue>, 98 | ) -> std::option::Option<#filter> { 99 | use wundergraph::query_builder::selection::filter::InnerFilter; 100 | std::option::Option::Some( 101 | wundergraph::query_builder::selection::filter::FilterBuildHelper::<#filter, #struct_type #ty_generics, #backend, __Ctx>::from_inner_input_value(obj)?.0 102 | ) 103 | } 104 | 105 | fn to_inner_input_value( 106 | _f: &#filter, 107 | _v: &mut wundergraph::indexmap::IndexMap<&str, wundergraph::juniper::InputValue> 108 | ) { 109 | 110 | } 111 | 112 | fn register_fields<'__r>( 113 | _info: &wundergraph::juniper_ext::NameBuilder<()>, 114 | registry: &mut wundergraph::juniper::Registry<'__r, wundergraph::scalar::WundergraphScalarValue> 115 | ) -> std::vec::Vec> { 116 | use wundergraph::query_builder::selection::filter::InnerFilter; 117 | wundergraph::query_builder::selection::filter::FilterBuildHelper::<#filter, #struct_type #ty_generics, #backend, __Ctx>::register_fields(&Default::default(), registry) 118 | } 119 | } 120 | }) 121 | } 122 | -------------------------------------------------------------------------------- /wundergraph_derive/src/diagnostic_shim.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::Span; 2 | 3 | pub trait EmitErrorExt { 4 | fn emit_error(self) -> Option; 5 | } 6 | 7 | impl EmitErrorExt for Result { 8 | fn emit_error(self) -> Option { 9 | self.map_err(Diagnostic::emit).ok() 10 | } 11 | } 12 | 13 | pub trait DiagnosticShim { 14 | fn error>(self, msg: T) -> Diagnostic; 15 | fn warning>(self, msg: T) -> Diagnostic; 16 | } 17 | 18 | #[cfg(feature = "nightly")] 19 | impl DiagnosticShim for Span { 20 | fn error>(self, msg: T) -> Diagnostic { 21 | self.unstable().error(msg) 22 | } 23 | 24 | fn warning>(self, msg: T) -> Diagnostic { 25 | self.unstable().warning(msg) 26 | } 27 | } 28 | 29 | #[cfg(not(feature = "nightly"))] 30 | impl DiagnosticShim for Span { 31 | fn error>(self, msg: T) -> Diagnostic { 32 | Diagnostic::error(msg) 33 | } 34 | 35 | fn warning>(self, msg: T) -> Diagnostic { 36 | Diagnostic::warning(msg) 37 | } 38 | } 39 | 40 | #[cfg(feature = "nightly")] 41 | pub use proc_macro::Diagnostic; 42 | 43 | #[cfg(not(feature = "nightly"))] 44 | pub struct Diagnostic { 45 | message: String, 46 | level: Level, 47 | } 48 | 49 | #[cfg(not(feature = "nightly"))] 50 | impl Diagnostic { 51 | fn error>(msg: T) -> Self { 52 | Self { 53 | message: msg.into(), 54 | level: Level::Error, 55 | } 56 | } 57 | 58 | fn warning>(msg: T) -> Self { 59 | Self { 60 | message: msg.into(), 61 | level: Level::Warning, 62 | } 63 | } 64 | 65 | 
#[allow(unused)] 66 | pub fn help>(mut self, msg: T) -> Self { 67 | self.message.push_str("\n"); 68 | self.message.push_str(&msg.into()); 69 | self 70 | } 71 | 72 | #[allow(unused)] 73 | pub fn note(self, msg: &str) -> Self { 74 | self.help(msg) 75 | } 76 | 77 | #[allow(clippy::print_stdout)] 78 | pub fn emit(self) { 79 | match self.level { 80 | Level::Error => panic!("{}", self.message), 81 | Level::Warning => println!("{}", self.message), 82 | } 83 | } 84 | } 85 | 86 | #[cfg(not(feature = "nightly"))] 87 | enum Level { 88 | Warning, 89 | Error, 90 | } 91 | -------------------------------------------------------------------------------- /wundergraph_derive/src/field.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::{Span, TokenStream}; 2 | use quote::quote_spanned; 3 | use syn::parse_quote; 4 | use syn::spanned::Spanned; 5 | 6 | use crate::diagnostic_shim::{Diagnostic, DiagnosticShim}; 7 | use crate::meta::*; 8 | use crate::utils::*; 9 | 10 | #[derive(Debug)] 11 | pub struct Field { 12 | pub ty: syn::Type, 13 | rust_name: FieldName, 14 | graphql_name: syn::Path, 15 | sql_name: syn::Path, 16 | pub span: Span, 17 | pub doc: Option, 18 | pub deprecated: Option, 19 | flags: MetaItem, 20 | } 21 | 22 | impl Field { 23 | pub fn from_struct_field(field: &syn::Field, index: usize) -> Result { 24 | let rust_name = match field.ident { 25 | Some(ref o) => { 26 | let mut x = o.clone(); 27 | // https://github.com/rust-lang/rust/issues/47983#issuecomment-362817105 28 | x.set_span(fix_span(o.span(), Span::call_site())); 29 | FieldName::Named(x.into()) 30 | } 31 | None => FieldName::Unnamed(syn::Index { 32 | index: index as u32, 33 | // https://github.com/rust-lang/rust/issues/47312 34 | span: Span::call_site(), 35 | }), 36 | }; 37 | let span = field.span(); 38 | let doc = MetaItem::get_docs(&field.attrs); 39 | let deprecated = MetaItem::get_deprecated(&field.attrs); 40 | let flags = MetaItem::with_name(&field.attrs, "wundergraph") 41 | .unwrap_or_else(|| MetaItem::empty("wundergraph")); 42 | 43 | let sql_name = MetaItem::with_name(&field.attrs, "column_name") 44 | .ok_or_else(|| span.error("No `#[column_name = \"name\"]` annotation found")) 45 | .and_then(|i| i.ident_value()) 46 | .or_else(|_| { 47 | match rust_name { 48 | FieldName::Named(ref x) => Ok(x.clone()), 49 | FieldName::Unnamed(_) => Err(span.error("Tuple struct fields needed to be annotated with `#[column_name = \"sql_name\"]")), 50 | } 51 | })?; 52 | let graphql_name = flags 53 | .nested_item("graphql_name") 54 | .and_then(|i|i.ident_value()) 55 | .or_else(|_| { 56 | match rust_name { 57 | FieldName::Named(ref x) => Ok(x.clone()), 58 | FieldName::Unnamed(_) => Err(span.error("Tuple struct fields needed to be annotated with `#[wundergraph(graphql_name = \"sql_name\")]")), 59 | } 60 | })?; 61 | 62 | Ok(Self { 63 | ty: field.ty.clone(), 64 | rust_name, 65 | graphql_name, 66 | sql_name, 67 | flags, 68 | span, 69 | doc, 70 | deprecated, 71 | }) 72 | } 73 | pub fn rust_name(&self) -> &FieldName { 74 | &self.rust_name 75 | } 76 | 77 | pub fn graphql_name(&self) -> &syn::Path { 78 | &self.graphql_name 79 | } 80 | 81 | pub fn sql_name(&self) -> &syn::Path { 82 | &self.sql_name 83 | } 84 | } 85 | 86 | #[derive(Debug)] 87 | pub enum FieldName { 88 | Named(syn::Path), 89 | Unnamed(syn::Index), 90 | } 91 | 92 | impl FieldName { 93 | #[allow(unused)] 94 | pub fn assign(&self, expr: &syn::Expr) -> syn::FieldValue { 95 | let span = self.span(); 96 | // Parens are to work around 
https://github.com/rust-lang/rust/issues/47311 97 | let tokens = quote_spanned!(span=> #self: (#expr)); 98 | parse_quote!(#tokens) 99 | } 100 | 101 | pub fn access(&self) -> TokenStream { 102 | let span = self.span(); 103 | // Span of the dot is important due to 104 | // https://github.com/rust-lang/rust/issues/47312 105 | quote_spanned!(span=> .#self) 106 | } 107 | 108 | pub fn span(&self) -> Span { 109 | match *self { 110 | FieldName::Named(ref x) => x.span(), 111 | FieldName::Unnamed(ref x) => x.span(), 112 | } 113 | } 114 | } 115 | 116 | impl quote::ToTokens for FieldName { 117 | fn to_tokens(&self, tokens: &mut TokenStream) { 118 | match *self { 119 | FieldName::Named(ref x) => x.to_tokens(tokens), 120 | FieldName::Unnamed(ref x) => x.to_tokens(tokens), 121 | } 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /wundergraph_derive/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! A helper crate implementing a bunch of custom derives for wundergraph 2 | #![deny(missing_debug_implementations, missing_copy_implementations)] 3 | #![warn( 4 | missing_docs, 5 | clippy::option_unwrap_used, 6 | clippy::result_unwrap_used, 7 | clippy::print_stdout, 8 | clippy::wrong_pub_self_convention, 9 | clippy::mut_mut, 10 | clippy::non_ascii_literal, 11 | clippy::similar_names, 12 | clippy::unicode_not_nfc, 13 | clippy::enum_glob_use, 14 | clippy::if_not_else, 15 | clippy::items_after_statements, 16 | clippy::used_underscore_binding, 17 | clippy::cargo_common_metadata, 18 | clippy::dbg_macro, 19 | clippy::doc_markdown, 20 | clippy::filter_map, 21 | clippy::map_flatten, 22 | clippy::match_same_arms, 23 | clippy::needless_borrow, 24 | clippy::needless_pass_by_value, 25 | clippy::option_map_unwrap_or, 26 | clippy::option_map_unwrap_or_else, 27 | clippy::redundant_clone, 28 | clippy::result_map_unwrap_or_else, 29 | clippy::unnecessary_unwrap, 30 | clippy::unseparated_literal_suffix, 31 | clippy::wildcard_dependencies 32 | )] 33 | extern crate proc_macro; 34 | 35 | mod diagnostic_shim; 36 | mod field; 37 | mod meta; 38 | mod model; 39 | mod resolved_at_shim; 40 | mod utils; 41 | 42 | pub(crate) mod belonging_to; 43 | pub(crate) mod build_filter_helper; 44 | mod wundergraph_entity; 45 | mod wundergraph_filter; 46 | pub(crate) mod wundergraph_value; 47 | 48 | use self::diagnostic_shim::Diagnostic; 49 | use proc_macro::TokenStream; 50 | 51 | /// A custom derive to implement all wundergraph related traits for a entity 52 | /// Using this trait implies internally `#[derive(WundergraphBelongsTo)]` 53 | /// and `#[derive(BuildFilterHelper)]` 54 | /// 55 | /// # Type attributes 56 | /// * **Required**: 57 | /// * `#[table_name = "diesel_table_mod"]`: Name of the underlying diesel table. 58 | /// * Optional: 59 | /// * `#[primary_key(primary_key_name)]`: Names the fields that represent 60 | /// the primary key on the underlying database table. Set 61 | /// to `id` if not given (In this case a primary key field named `id` must exist) 62 | /// * `/// Documentation`/`#[doc = "Documentation"]`: Set as GraphQL 63 | /// description text. 64 | /// 65 | /// # Field attributes 66 | /// All attributes are optional. 
If no attributes are given the field name needs to 67 | /// match the name of the field in the corresponding diesel `table!` and is used 68 | /// as the GraphQL field name 69 | /// 70 | /// * `#[column_name = "other_name"]`: Use the given name instead of the field 71 | /// name as column name for calling into diesel's `table!` 72 | /// * `#[wundergraph(graphql_name = "Foo")]`: Set the GraphQL name of the field 73 | /// to the given name. If not set, the field name is used as the name. 74 | /// * `#[deprecated(note = "Some Text")]`: Set as GraphQL deprecation notice 75 | /// * `/// Documentation`/`#[doc = "Documentation"]`: Set as GraphQL 76 | /// description text. 77 | #[proc_macro_derive(WundergraphEntity, attributes(wundergraph, table_name, primary_key))] 78 | pub fn derive_wundergraph_entity(input: TokenStream) -> TokenStream { 79 | expand_derive(input, wundergraph_entity::derive) 80 | } 81 | 82 | /// A custom derive to add support for a custom enum type 83 | /// 84 | /// # Type attributes 85 | /// * **Required**: 86 | /// * `#[sql_type = "DieselSqlType"]`: The SQL type the enum maps to on the diesel side 87 | /// 88 | /// # Variant attributes 89 | /// All variant attributes are optional. If no attributes are given the variant 90 | /// name is used as the GraphQL name. 91 | /// 92 | /// * `#[graphql(name = "CustomVariantName")]`: Set the name of an enum variant 93 | /// to the given custom name. 94 | #[proc_macro_derive(WundergraphValue, attributes(sql_type, graphql))] 95 | pub fn derive_wundergraph_value(input: TokenStream) -> TokenStream { 96 | expand_derive(input, wundergraph_value::derive) 97 | } 98 | 99 | /// A custom derive to implement the `BuildFilterHelper` trait 100 | /// 101 | /// # Type attributes 102 | /// * **Required**: 103 | /// * `#[table_name = "diesel_table_mod"]`: Name of the underlying diesel table. 104 | #[proc_macro_derive(BuildFilterHelper, attributes(table_name))] 105 | pub fn derive_build_filter_helper(input: TokenStream) -> TokenStream { 106 | expand_derive(input, build_filter_helper::derive) 107 | } 108 | 109 | /// A custom derive to implement the `WundergraphBelongsTo` trait 110 | /// for all `HasOne` fields of a given entity 111 | /// 112 | /// # Type attributes 113 | /// * **Required**: 114 | /// * `#[table_name = "diesel_table_mod"]`: Name of the underlying diesel table. 115 | /// 116 | /// # Field attributes 117 | /// All attributes are optional.
If no attributes are given the foreign key field 118 | /// name needs to match the name of the field in the corresponding diesel `table!` 119 | /// 120 | /// * `#[column_name = "other_name"]`: Use the given name instead of the field 121 | #[proc_macro_derive(WundergraphBelongsTo, attributes(table_name))] 122 | pub fn derive_belonging_to(input: TokenStream) -> TokenStream { 123 | expand_derive(input, belonging_to::derive) 124 | } 125 | 126 | #[doc(hidden)] 127 | #[proc_macro_derive(WundergraphFilter, attributes(wundergraph, table_name))] 128 | pub fn derive_wundergraph_filter(input: TokenStream) -> TokenStream { 129 | expand_derive(input, wundergraph_filter::derive) 130 | } 131 | 132 | fn expand_derive( 133 | input: TokenStream, 134 | f: fn(&syn::DeriveInput) -> Result, 135 | ) -> TokenStream { 136 | let item = syn::parse(input).expect("Failed to parse item"); 137 | match f(&item) { 138 | Ok(x) => x.into(), 139 | Err(e) => { 140 | e.emit(); 141 | "".parse().expect("Failed to parse item") 142 | } 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /wundergraph_derive/src/model.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::Span; 2 | use syn; 3 | 4 | use crate::diagnostic_shim::*; 5 | use crate::field::*; 6 | use crate::meta::*; 7 | 8 | pub struct Model { 9 | pub name: syn::Ident, 10 | fields: Vec, 11 | flags: MetaItem, 12 | table_name: Option, 13 | pub docs: Option, 14 | primary_keys: Vec, 15 | } 16 | 17 | impl Model { 18 | pub fn from_item(item: &syn::DeriveInput) -> Result { 19 | let table_name = 20 | MetaItem::with_name(&item.attrs, "table_name").map(|m| m.expect_ident_value()); 21 | let fields = fields_from_item_data(&item.data)?; 22 | let flags = MetaItem::with_name(&item.attrs, "wundergraph") 23 | .unwrap_or_else(|| MetaItem::empty("wundergraph")); 24 | let docs = MetaItem::get_docs(&item.attrs); 25 | let primary_keys = MetaItem::with_name(&item.attrs, "primary_key").map_or_else( 26 | || { 27 | Ok(vec![syn::Path::from(syn::Ident::new( 28 | "id", 29 | Span::call_site(), 30 | ))]) 31 | }, 32 | |m| m.nested()?.map(|m| m.path()).collect(), 33 | )?; 34 | Ok(Self { 35 | name: item.ident.clone(), 36 | fields, 37 | flags, 38 | table_name, 39 | docs, 40 | primary_keys, 41 | }) 42 | } 43 | 44 | pub fn fields(&self) -> &[Field] { 45 | &self.fields 46 | } 47 | 48 | pub fn table_type(&self) -> Result { 49 | self.table_name.clone().map_or_else( 50 | || { 51 | self.flags 52 | .nested_item("table_name") 53 | .and_then(|t| t.ident_value()) 54 | }, 55 | Ok, 56 | ) 57 | } 58 | 59 | pub fn primary_key(&self) -> &[syn::Path] { 60 | &self.primary_keys 61 | } 62 | 63 | pub fn filter_type(&self) -> Option { 64 | self.flags.get_flag("filter").ok() 65 | } 66 | } 67 | 68 | fn fields_from_item_data(data: &syn::Data) -> Result, Diagnostic> { 69 | use syn::Data::*; 70 | 71 | let struct_data = match *data { 72 | Struct(ref d) => d, 73 | _ => return Err(Span::call_site().error("This derive can only be used on structs")), 74 | }; 75 | struct_data 76 | .fields 77 | .iter() 78 | .enumerate() 79 | .map(|(i, f)| Field::from_struct_field(f, i)) 80 | .collect() 81 | } 82 | -------------------------------------------------------------------------------- /wundergraph_derive/src/resolved_at_shim.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::Span; 2 | 3 | pub trait ResolvedAtExt { 4 | fn resolved_at(self, span: Span) -> Span; 5 | } 6 | 7 | #[cfg(feature = 
"nightly")] 8 | impl ResolvedAtExt for Span { 9 | fn resolved_at(self, span: Span) -> Span { 10 | self.unstable().resolved_at(span.unstable()).into() 11 | } 12 | } 13 | 14 | #[cfg(not(feature = "nightly"))] 15 | impl ResolvedAtExt for Span { 16 | fn resolved_at(self, _: Span) -> Span { 17 | self 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /wundergraph_derive/src/utils.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::{Ident, Span, TokenStream}; 2 | use quote::quote; 3 | use syn::*; 4 | 5 | pub fn wrap_in_dummy_mod( 6 | name_place_holder: &str, 7 | ident: &Ident, 8 | item: &TokenStream, 9 | ) -> TokenStream { 10 | let call_site = root_span(Span::call_site()); 11 | let const_name = Ident::new( 12 | &format!("_impl_{}_for_{}", name_place_holder, ident.to_string()).to_uppercase(), 13 | call_site, 14 | ); 15 | quote! { 16 | #[doc(hidden)] 17 | #[allow(non_snake_case)] 18 | const #const_name: () = { 19 | extern crate std; 20 | #item 21 | }; 22 | } 23 | } 24 | 25 | pub fn inner_of_option_ty(ty: &Type) -> &Type { 26 | inner_ty_arg(ty, "Option", 0).unwrap_or(ty) 27 | } 28 | 29 | pub fn inner_of_box_ty(ty: &Type) -> &Type { 30 | inner_ty_arg(ty, "Box", 0).unwrap_or(ty) 31 | } 32 | 33 | pub fn is_box_ty(ty: &Type) -> bool { 34 | inner_ty_arg(ty, "Box", 0).is_some() 35 | } 36 | 37 | pub fn is_has_many(ty: &Type) -> bool { 38 | inner_ty_arg(inner_of_option_ty(ty), "HasMany", 0).is_some() 39 | } 40 | 41 | pub fn inner_ty_args<'a>( 42 | ty: &'a Type, 43 | type_name: &str, 44 | ) -> Option<&'a syn::punctuated::Punctuated> { 45 | use syn::PathArguments::AngleBracketed; 46 | 47 | match *ty { 48 | Type::Path(ref ty) => { 49 | let last_segment = ty 50 | .path 51 | .segments 52 | .iter() 53 | .last() 54 | .expect("Path without any segments"); 55 | match last_segment.arguments { 56 | AngleBracketed(ref args) if last_segment.ident == type_name => Some(&args.args), 57 | _ => None, 58 | } 59 | } 60 | _ => None, 61 | } 62 | } 63 | 64 | pub fn inner_ty_arg<'a>(ty: &'a Type, type_name: &str, index: usize) -> Option<&'a Type> { 65 | inner_ty_args(ty, type_name).and_then(|args| match args[index] { 66 | GenericArgument::Type(ref ty) => Some(ty), 67 | _ => None, 68 | }) 69 | } 70 | 71 | pub fn fix_span(maybe_bad_span: Span, fallback: Span) -> Span { 72 | let bad_span_debug = "Span(Span { lo: BytePos(0), hi: BytePos(0), ctxt: #0 })"; 73 | if format!("{:?}", maybe_bad_span) == bad_span_debug { 74 | fallback 75 | } else { 76 | maybe_bad_span 77 | } 78 | } 79 | 80 | #[cfg(not(feature = "nightly"))] 81 | fn root_span(span: Span) -> Span { 82 | span 83 | } 84 | 85 | #[cfg(feature = "nightly")] 86 | /// There's an issue with the resolution of `__diesel_use_everything` if the 87 | /// derive itself was generated from within a macro. This is a shitty workaround 88 | /// until we figure out the expected behavior. 
89 | fn root_span(span: Span) -> Span { 90 | span.unstable().source().into() 91 | } 92 | -------------------------------------------------------------------------------- /wundergraph_example/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors = ["Georg Semmler "] 3 | name = "wundergraph_example" 4 | version = "0.1.0" 5 | license = "MIT OR Apache-2.0" 6 | publish = false 7 | repository = "https://github.com/weiznich/wundergraph" 8 | readme = "../README.md" 9 | keywords = ["GraphQL", "ORM", "PostgreSQL", "SQLite"] 10 | categories = ["database", "web-programming"] 11 | description = "A GraphQL ORM build on top of diesel" 12 | edition = "2018" 13 | 14 | [dependencies] 15 | diesel = { version = "1.4.0", features = ["r2d2", "chrono"]} 16 | diesel_migrations = "1.4.0" 17 | juniper = "0.14" 18 | actix-web = "1.0.0" 19 | indexmap = "1" 20 | serde = {version = "1", features = ["derive"]} 21 | serde_json = "1" 22 | env_logger = "0.7" 23 | structopt = "0.3" 24 | failure = "0.1" 25 | 26 | [dependencies.wundergraph] 27 | path = "../wundergraph" 28 | default-features = false 29 | 30 | [features] 31 | default = ["postgres", "wundergraph/debug"] 32 | sqlite = ["wundergraph/sqlite", "diesel/sqlite"] 33 | postgres = ["wundergraph/postgres", "diesel/postgres"] 34 | -------------------------------------------------------------------------------- /wundergraph_example/migrations/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/weiznich/wundergraph/ed4986f3e24f38531726125783917bf61af0bbd0/wundergraph_example/migrations/.gitkeep -------------------------------------------------------------------------------- /wundergraph_example/migrations/pg/2018-01-24-131925_setup/down.sql: -------------------------------------------------------------------------------- 1 | -- This file should undo anything in `up.sql` 2 | 3 | DROP TABLE friends; 4 | DROP TABLE appears_in; 5 | DROP TABLE heros; 6 | DROP TABLE home_worlds; 7 | DROP TABLE species; 8 | -------------------------------------------------------------------------------- /wundergraph_example/migrations/pg/2018-01-24-131925_setup/up.sql: -------------------------------------------------------------------------------- 1 | -- Your SQL goes here 2 | 3 | CREATE TABLE species( 4 | id SERIAL PRIMARY KEY, 5 | name TEXT NOT NULL 6 | ); 7 | 8 | CREATE TABLE home_worlds( 9 | id SERIAL PRIMARY KEY, 10 | name TEXT NOT NULL 11 | ); 12 | 13 | CREATE TABLE heros( 14 | id SERIAL PRIMARY KEY, 15 | name TEXT NOT NULL, 16 | hair_color TEXT, 17 | species INTEGER NOT NULL REFERENCES species(id) ON DELETE CASCADE ON UPDATE RESTRICT, 18 | home_world INTEGER REFERENCES home_worlds(id) ON DELETE CASCADE ON UPDATE RESTRICT 19 | ); 20 | 21 | CREATE TABLE appears_in( 22 | hero_id INTEGER NOT NULL REFERENCES heros(id) ON DELETE CASCADE ON UPDATE RESTRICT, 23 | episode SMALLINT NOT NULL CHECK(episode IN (1,2,3)), 24 | PRIMARY KEY(hero_id, episode) 25 | ); 26 | 27 | CREATE TABLE friends( 28 | hero_id INTEGER NOT NULL REFERENCES heros(id) ON DELETE CASCADE ON UPDATE RESTRICT, 29 | friend_id INTEGER NOT NULL REFERENCES heros(id) ON DELETE CASCADE ON UPDATE RESTRICT, 30 | PRIMARY KEY(hero_id, friend_id) 31 | ); 32 | 33 | INSERT INTO species(id, name) VALUES (1, 'Human'), (2, 'Robot'); 34 | ALTER SEQUENCE species_id_seq RESTART WITH 3; 35 | 36 | INSERT INTO home_worlds(id, name) VALUES(1, 'Tatooine'), (2, 'Alderaan'); 37 | ALTER SEQUENCE home_worlds_id_seq RESTART 
WITH 3; 38 | 39 | INSERT INTO heros(id, name, species, home_world, hair_color) 40 | VALUES (1, 'Luke Skywalker', 1, 1, 'blond'), 41 | (2, 'Darth Vader', 1, 1, DEFAULT), 42 | (3, 'Han Solo', 1, Null, DEFAULT), 43 | (4, 'Leia Organa', 1, 2, DEFAULT), 44 | (5, 'Wilhuff Tarkin', 1, Null, DEFAULT); 45 | ALTER SEQUENCE heros_id_seq RESTART WITH 6; 46 | 47 | INSERT INTO appears_in(hero_id, episode) 48 | VALUES (1, 1), (1, 2), (1, 3), 49 | (2, 1), (2, 2), (2, 3), 50 | (3, 1), (3, 2), (3, 3), 51 | (4, 1), (4, 2), (4, 3), 52 | (5, 3); 53 | 54 | 55 | INSERT INTO friends(hero_id, friend_id) 56 | VALUES (1, 3), (1, 4), (2, 5), (3, 1), 57 | (3, 4), (4, 1), (4, 3), (5, 2); 58 | -------------------------------------------------------------------------------- /wundergraph_example/migrations/sqlite/2018-01-24-131925_setup/down.sql: -------------------------------------------------------------------------------- 1 | -- This file should undo anything in `up.sql` -------------------------------------------------------------------------------- /wundergraph_example/migrations/sqlite/2018-01-24-131925_setup/up.sql: -------------------------------------------------------------------------------- 1 | -- Your SQL goes here 2 | 3 | CREATE TABLE species( 4 | id INTEGER PRIMARY KEY AUTOINCREMENT, 5 | name TEXT NOT NULL 6 | ); 7 | 8 | CREATE TABLE home_worlds( 9 | id INTEGER PRIMARY KEY AUTOINCREMENT, 10 | name TEXT NOT NULL 11 | ); 12 | 13 | CREATE TABLE heros( 14 | id INTEGER PRIMARY KEY AUTOINCREMENT, 15 | name TEXT NOT NULL, 16 | hair_color TEXT, 17 | species INTEGER NOT NULL REFERENCES species(id) ON DELETE CASCADE ON UPDATE RESTRICT, 18 | home_world INTEGER REFERENCES home_worlds(id) ON DELETE CASCADE ON UPDATE RESTRICT 19 | ); 20 | 21 | CREATE TABLE appears_in( 22 | hero_id INTEGER NOT NULL REFERENCES heros(id) ON DELETE CASCADE ON UPDATE RESTRICT, 23 | episode SMALLINT NOT NULL CHECK(episode IN (1,2,3)), 24 | PRIMARY KEY(hero_id, episode) 25 | ); 26 | 27 | CREATE TABLE friends( 28 | hero_id INTEGER NOT NULL REFERENCES heros(id) ON DELETE CASCADE ON UPDATE RESTRICT, 29 | friend_id INTEGER NOT NULL REFERENCES heros(id) ON DELETE CASCADE ON UPDATE RESTRICT, 30 | PRIMARY KEY(hero_id, friend_id) 31 | ); 32 | 33 | INSERT INTO species(name) VALUES('Human'); 34 | INSERT INTO species(name) VALUES('Robot'); 35 | 36 | INSERT INTO home_worlds(name) VALUES('Tatooine'); 37 | INSERT INTO home_worlds(name) VALUES('Alderaan'); 38 | 39 | INSERT INTO heros(name, species, home_world, hair_color) VALUES ('Luke Skywalker', 1, 1, 'blond'); 40 | INSERT INTO heros(name, species, home_world) VALUES ('Darth Vader', 1, 1); 41 | INSERT INTO heros(name, species, home_world) VALUES ('Han Solo', 1, Null); 42 | INSERT INTO heros(name, species, home_world) VALUES ('Leia Organa', 1, 2); 43 | INSERT INTO heros(name, species, home_world) VALUES ('Wilhuff Tarkin', 1, Null); 44 | 45 | INSERT INTO appears_in(hero_id, episode) VALUES(1, 1); 46 | INSERT INTO appears_in(hero_id, episode) VALUES(1, 2); 47 | INSERT INTO appears_in(hero_id, episode) VALUES(1, 3); 48 | 49 | INSERT INTO appears_in(hero_id, episode) VALUES(2, 1); 50 | INSERT INTO appears_in(hero_id, episode) VALUES(2, 2); 51 | INSERT INTO appears_in(hero_id, episode) VALUES(2, 3); 52 | 53 | INSERT INTO appears_in(hero_id, episode) VALUES(3, 1); 54 | INSERT INTO appears_in(hero_id, episode) VALUES(3, 2); 55 | INSERT INTO appears_in(hero_id, episode) VALUES(3, 3); 56 | 57 | INSERT INTO appears_in(hero_id, episode) VALUES(4, 1); 58 | INSERT INTO appears_in(hero_id, episode) VALUES(4, 2); 59 |
INSERT INTO appears_in(hero_id, episode) VALUES(4, 3); 60 | 61 | INSERT INTO appears_in(hero_id, episode) VALUES(5, 3); 62 | 63 | INSERT INTO friends(hero_id, friend_id) VALUES(1, 3); 64 | INSERT INTO friends(hero_id, friend_id) VALUES(1, 4); 65 | 66 | INSERT INTO friends(hero_id, friend_id) VALUES(2, 5); 67 | 68 | INSERT INTO friends(hero_id, friend_id) VALUES(3, 1); 69 | INSERT INTO friends(hero_id, friend_id) VALUES(3, 4); 70 | 71 | INSERT INTO friends(hero_id, friend_id) VALUES(4, 1); 72 | INSERT INTO friends(hero_id, friend_id) VALUES(4, 3); 73 | 74 | INSERT INTO friends(hero_id, friend_id) VALUES(5, 2); 75 | -------------------------------------------------------------------------------- /wundergraph_example/src/bin/wundergraph_example.rs: -------------------------------------------------------------------------------- 1 | #![deny(missing_debug_implementations, missing_copy_implementations)] 2 | #![warn( 3 | clippy::print_stdout, 4 | clippy::wrong_pub_self_convention, 5 | clippy::mut_mut, 6 | clippy::non_ascii_literal, 7 | clippy::similar_names, 8 | clippy::unicode_not_nfc, 9 | clippy::enum_glob_use, 10 | clippy::if_not_else, 11 | clippy::items_after_statements, 12 | clippy::used_underscore_binding, 13 | clippy::cargo_common_metadata, 14 | clippy::dbg_macro, 15 | clippy::doc_markdown, 16 | clippy::filter_map, 17 | clippy::map_flatten, 18 | clippy::match_same_arms, 19 | clippy::needless_borrow, 20 | clippy::option_map_unwrap_or, 21 | clippy::option_map_unwrap_or_else, 22 | clippy::redundant_clone, 23 | clippy::result_map_unwrap_or_else, 24 | clippy::unnecessary_unwrap, 25 | clippy::unseparated_literal_suffix, 26 | clippy::wildcard_dependencies 27 | )] 28 | 29 | use actix_web::web::{Data, Json}; 30 | use actix_web::{middleware, web, App, HttpResponse, HttpServer}; 31 | use diesel::r2d2::{ConnectionManager, Pool}; 32 | use juniper::graphiql::graphiql_source; 33 | use juniper::http::GraphQLRequest; 34 | use serde::{Deserialize, Serialize}; 35 | use std::path::PathBuf; 36 | use std::sync::Arc; 37 | use structopt::StructOpt; 38 | use wundergraph::scalar::WundergraphScalarValue; 39 | use wundergraph_example::mutations::Mutation; 40 | use wundergraph_example::*; 41 | 42 | #[derive(Debug, StructOpt)] 43 | #[structopt(name = "wundergraph_example")] 44 | struct Opt { 45 | #[structopt(short = "u", long = "db-url")] 46 | database_url: String, 47 | #[structopt(short = "s", long = "socket", default_value = "127.0.0.1:8000")] 48 | socket: String, 49 | } 50 | 51 | // actix integration stuff 52 | #[derive(Serialize, Deserialize, Debug)] 53 | pub struct GraphQLData(GraphQLRequest); 54 | 55 | #[derive(Clone)] 56 | struct AppState { 57 | schema: Arc>>, 58 | pool: Arc>>, 59 | } 60 | 61 | fn graphiql() -> HttpResponse { 62 | let html = graphiql_source("/graphql"); 63 | HttpResponse::Ok() 64 | .content_type("text/html; charset=utf-8") 65 | .body(html) 66 | } 67 | 68 | fn graphql( 69 | Json(GraphQLData(data)): Json, 70 | st: Data, 71 | ) -> Result { 72 | let ctx = MyContext::new(st.get_ref().pool.get()?); 73 | let res = data.execute(&st.get_ref().schema, &ctx); 74 | Ok(HttpResponse::Ok() 75 | .content_type("application/json") 76 | .body(serde_json::to_string(&res)?)) 77 | } 78 | 79 | fn run_migrations(conn: &DBConnection) { 80 | let mut migration_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); 81 | migration_path.push("migrations"); 82 | if cfg!(feature = "postgres") { 83 | migration_path.push("pg"); 84 | } else if cfg!(feature = "sqlite") { 85 | migration_path.push("sqlite"); 86 | } 87 | let 
pending_migrations = 88 | ::diesel_migrations::mark_migrations_in_directory(conn, &migration_path) 89 | .unwrap() 90 | .into_iter() 91 | .filter_map(|(migration, run)| if run { None } else { Some(migration) }); 92 | 93 | ::diesel_migrations::run_migrations(conn, pending_migrations, &mut ::std::io::stdout()) 94 | .expect("Failed to run migrations"); 95 | } 96 | 97 | #[allow(clippy::print_stdout)] 98 | fn main() { 99 | let opt = Opt::from_args(); 100 | ::std::env::set_var("RUST_LOG", "actix_web=info"); 101 | env_logger::init(); 102 | let manager = ConnectionManager::::new(opt.database_url); 103 | let pool = Pool::builder() 104 | .max_size(1) 105 | .build(manager) 106 | .expect("Failed to init pool"); 107 | 108 | run_migrations(&pool.get().expect("Failed to get db connection")); 109 | 110 | let query = Query::>::default(); 111 | let mutation = Mutation::>::default(); 112 | let schema = Schema::new(query, mutation); 113 | 114 | let schema = Arc::new(schema); 115 | let pool = Arc::new(pool); 116 | let data = AppState { schema, pool }; 117 | 118 | let url = opt.socket; 119 | 120 | println!("Started http server: http://{}", url); 121 | 122 | HttpServer::new(move || { 123 | App::new() 124 | .data(data.clone()) 125 | .wrap(middleware::Logger::default()) 126 | .route("/graphql", web::get().to(graphql)) 127 | .route("/graphql", web::post().to(graphql)) 128 | .route("/graphiql", web::get().to(graphiql)) 129 | .default_service(web::route().to(|| { 130 | HttpResponse::Found() 131 | .header("location", "/graphiql") 132 | .finish() 133 | })) 134 | }) 135 | .bind(&url) 136 | .expect("Failed to start server") 137 | .run() 138 | .unwrap(); 139 | } 140 | -------------------------------------------------------------------------------- /wundergraph_example/src/mutations.rs: -------------------------------------------------------------------------------- 1 | use super::appears_in; 2 | use super::friends; 3 | use super::heros; 4 | use super::home_worlds; 5 | use super::species; 6 | use super::AppearsIn; 7 | use super::Episode; 8 | use super::Friend; 9 | use super::Hero; 10 | use super::HomeWorld; 11 | use super::Species; 12 | use juniper::*; 13 | 14 | #[derive(Insertable, GraphQLInputObject, Clone, Debug)] 15 | #[table_name = "heros"] 16 | pub struct NewHero { 17 | name: String, 18 | hair_color: Option, 19 | species: i32, 20 | home_world: Option, 21 | } 22 | 23 | #[derive(AsChangeset, GraphQLInputObject, Identifiable, Debug)] 24 | #[table_name = "heros"] 25 | pub struct HeroChangeset { 26 | id: i32, 27 | name: Option, 28 | hair_color: Option, 29 | species: Option, 30 | home_world: Option, 31 | } 32 | 33 | #[derive(Insertable, GraphQLInputObject, Clone, Debug)] 34 | #[table_name = "species"] 35 | pub struct NewSpecies { 36 | name: String, 37 | } 38 | 39 | #[derive(AsChangeset, GraphQLInputObject, Identifiable, Debug)] 40 | #[table_name = "species"] 41 | pub struct SpeciesChangeset { 42 | id: i32, 43 | name: Option, 44 | } 45 | 46 | #[derive(Insertable, GraphQLInputObject, Debug)] 47 | #[table_name = "home_worlds"] 48 | pub struct NewHomeWorld { 49 | name: String, 50 | } 51 | 52 | #[derive(AsChangeset, GraphQLInputObject, Identifiable, Debug)] 53 | #[table_name = "home_worlds"] 54 | pub struct HomeWorldChangeset { 55 | id: i32, 56 | name: Option, 57 | } 58 | 59 | #[derive(Insertable, GraphQLInputObject, Debug, Copy, Clone)] 60 | #[table_name = "friends"] 61 | pub struct NewFriend { 62 | hero_id: i32, 63 | friend_id: i32, 64 | } 65 | 66 | #[derive(Insertable, GraphQLInputObject, Debug, Copy, Clone)] 67 | 
#[table_name = "appears_in"] 68 | pub struct NewAppearsIn { 69 | hero_id: i32, 70 | episode: Episode, 71 | } 72 | 73 | wundergraph::mutation_object! { 74 | /// Global mutation object for the schema 75 | Mutation { 76 | Hero(insert = NewHero, update = HeroChangeset,), 77 | Species(insert = NewSpecies, update = SpeciesChangeset,), 78 | HomeWorld(insert = NewHomeWorld, update = HomeWorldChangeset,), 79 | Friend( insert = NewFriend, update = false), 80 | AppearsIn(insert = NewAppearsIn, ), 81 | } 82 | } 83 | --------------------------------------------------------------------------------
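The example crate above builds its GraphQL schema from plain Rust structs: the derives documented in wundergraph_derive/src/lib.rs wire a struct to a diesel `table!`, and the `mutation_object!` invocation in wundergraph_example/src/mutations.rs registers the insert and update types. The following is a rough, self-contained sketch of how those pieces fit together for the `heros` and `species` tables created by the migrations above. The `query_object!` macro name, the `HasOne`/`HasMany` import path and their exact generic parameters are assumptions inferred from the file tree and the derive documentation, not verified against the crate.

// Sketch only; `query_object!` and the `HasOne`/`HasMany` paths and signatures are assumed.
#[macro_use]
extern crate diesel;

use wundergraph::query_builder::types::{HasMany, HasOne};
use wundergraph::WundergraphEntity;

table! {
    heros (id) {
        id -> Integer,
        name -> Text,
        hair_color -> Nullable<Text>,
        species -> Integer,
        home_world -> Nullable<Integer>,
    }
}

table! {
    species (id) {
        id -> Integer,
        name -> Text,
    }
}

joinable!(heros -> species (species));
allow_tables_to_appear_in_same_query!(heros, species);

/// Entity backed by the `heros` table from the migrations above.
#[derive(Clone, Debug, Identifiable, WundergraphEntity)]
#[table_name = "heros"]
pub struct Hero {
    id: i32,
    /// Renamed only on the GraphQL side, as documented for the derive.
    #[wundergraph(graphql_name = "heroName")]
    name: String,
    hair_color: Option<String>,
    /// Foreign key to `species`, exposed as a nested GraphQL object.
    species: HasOne<i32, Species>,
    home_world: Option<i32>,
}

/// Entity backed by the `species` table, carrying the reverse side of `Hero::species`.
#[derive(Clone, Debug, Identifiable, WundergraphEntity)]
#[table_name = "species"]
pub struct Species {
    id: i32,
    name: String,
    heros: HasMany<Hero, heros::species>,
}

wundergraph::query_object! {
    /// Global query object for the schema
    Query {
        Hero,
        Species,
    }
}

With a query object like this and the `Mutation` from mutations.rs, the `Schema::new(query, mutation)` call in wundergraph_example/src/bin/wundergraph_example.rs serves both over actix-web.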
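The doubled braces in wundergraph_cli/src/print_schema/template_main.rs are there because the file is evidently used as a format string by the CLI: placeholders such as `{conn}`, `{db_url}`, `{migrations}` and `{listen_url}` get substituted when the crate skeleton is printed. A minimal sketch of what the pool set-up roughly expands to for a Postgres backend follows (error handling simplified, so this is not the literal generated code): a single-connection r2d2 pool whose connection is put into a diesel test transaction as soon as it is acquired, so the generated schema and seed SQL are never committed.

use diesel::connection::SimpleConnection;
use diesel::r2d2::{ConnectionManager, CustomizeConnection, Error as R2d2Error, Pool};
use diesel::{Connection, PgConnection};

#[derive(Debug)]
struct TestTransactionCustomizer;

impl CustomizeConnection<PgConnection, R2d2Error> for TestTransactionCustomizer {
    fn on_acquire(&self, conn: &mut PgConnection) -> Result<(), R2d2Error> {
        // Everything executed on this connection is rolled back when it is dropped.
        conn.begin_test_transaction().map_err(R2d2Error::QueryError)
    }
}

fn build_test_pool(db_url: &str, migrations: &str) -> Pool<ConnectionManager<PgConnection>> {
    let manager = ConnectionManager::<PgConnection>::new(db_url);
    let pool = Pool::builder()
        // A single connection keeps every query inside the same test transaction.
        .max_size(1)
        .connection_customizer(Box::new(TestTransactionCustomizer))
        .build(manager)
        .expect("Failed to init pool");
    // Apply the embedded schema and seed SQL inside that test transaction.
    pool.get()
        .expect("Failed to get db connection")
        .batch_execute(migrations)
        .expect("Failed to run migrations");
    pool
}

wundergraph_example/src/bin/wundergraph_example.rs takes the other route for a real server: the same `max_size(1)` pool, but with `diesel_migrations` applying the migration files instead of a throwaway test transaction.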