├── .circleci └── config.yml ├── .eslintrc.js ├── .gitattributes ├── .gitignore ├── .prettierrc.json ├── LICENSE ├── README.md ├── __tests__ ├── fixtures │ └── queries │ │ └── derived-field.graphql ├── helpers.js ├── integration │ ├── __snapshots__ │ │ ├── queries.test.js.snap │ │ └── schema.test.js.snap │ ├── derivedFieldDefinitions.js │ ├── queries.test.js │ └── schema.test.js ├── plugin-test-data.sql ├── plugin-test-schema.sql └── printSchemaOrdered.js ├── index.js ├── package.json ├── scripts └── test ├── src └── DerivedFieldPlugin.js └── yarn.lock /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | node10_pg10: 4 | docker: 5 | - image: circleci/node:10 6 | - image: circleci/postgres:10 7 | environment: 8 | POSTGRES_HOST_AUTH_METHOD: trust 9 | POSTGRES_USER: circleci 10 | POSTGRES_DB: circle_test 11 | steps: 12 | - checkout 13 | - run: sudo apt update 14 | - run: sudo apt install -y postgresql-client 15 | - restore_cache: 16 | keys: 17 | - yarn-packages-{{ checksum "yarn.lock" }} 18 | - run: 19 | command: yarn install --frozen-lockfile 20 | - save_cache: 21 | key: yarn-packages-{{ checksum "yarn.lock" }} 22 | paths: 23 | - ~/.cache/yarn 24 | - run: yarn lint 25 | - run: 26 | command: yarn test 27 | environment: 28 | TEST_DATABASE_URL: postgres://circleci@localhost:5432/circle_test 29 | node10_pg11: 30 | docker: 31 | - image: circleci/node:10 32 | - image: circleci/postgres:11 33 | environment: 34 | POSTGRES_HOST_AUTH_METHOD: trust 35 | POSTGRES_USER: circleci 36 | POSTGRES_DB: circle_test 37 | steps: 38 | - checkout 39 | - run: sudo apt update 40 | - run: sudo apt install -y postgresql-client 41 | - restore_cache: 42 | keys: 43 | - yarn-packages-{{ checksum "yarn.lock" }} 44 | - run: 45 | command: yarn install --frozen-lockfile 46 | - save_cache: 47 | key: yarn-packages-{{ checksum "yarn.lock" }} 48 | paths: 49 | - ~/.cache/yarn 50 | - run: yarn lint 51 | - run: 52 | command: yarn test 53 | environment: 54 | TEST_DATABASE_URL: postgres://circleci@localhost:5432/circle_test 55 | node12_pg12: 56 | docker: 57 | - image: circleci/node:12 58 | - image: circleci/postgres:12 59 | environment: 60 | POSTGRES_HOST_AUTH_METHOD: trust 61 | POSTGRES_USER: circleci 62 | POSTGRES_DB: circle_test 63 | steps: 64 | - checkout 65 | - run: sudo apt update 66 | - run: sudo apt install -y postgresql-client 67 | - restore_cache: 68 | keys: 69 | - yarn-packages-{{ checksum "yarn.lock" }} 70 | - run: 71 | command: yarn install --frozen-lockfile 72 | - save_cache: 73 | key: yarn-packages-{{ checksum "yarn.lock" }} 74 | paths: 75 | - ~/.cache/yarn 76 | - run: yarn lint 77 | - run: 78 | command: yarn test 79 | environment: 80 | TEST_DATABASE_URL: postgres://circleci@localhost:5432/circle_test 81 | workflows: 82 | version: 2 83 | test: 84 | jobs: 85 | - node10_pg10 86 | - node10_pg11 87 | - node12_pg12 88 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | env: { 3 | node: true, 4 | es6: true, 5 | "jest/globals": true, 6 | }, 7 | parserOptions: { 8 | ecmaVersion: 9, 9 | }, 10 | plugins: ["jest"], 11 | extends: [ 12 | "eslint:recommended", 13 | "plugin:jest/recommended", 14 | "plugin:prettier/recommended", 15 | ], 16 | }; 17 | -------------------------------------------------------------------------------- /.gitattributes: 
-------------------------------------------------------------------------------- 1 | * text=auto 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "es5" 3 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Matt Bretl 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Package on npm](https://img.shields.io/npm/v/postgraphile-plugin-derived-field.svg)](https://www.npmjs.com/package/postgraphile-plugin-derived-field) [![CircleCI](https://circleci.com/gh/graphile-contrib/postgraphile-plugin-derived-field.svg?style=svg)](https://circleci.com/gh/graphile-contrib/postgraphile-plugin-derived-field) 2 | 3 | # postgraphile-plugin-derived-field 4 | 5 | This plugin provides an interface for adding derived fields to the schema generated by PostGraphile v4. 6 | 7 | The term "derived fields" is used to differentiate this approach from the standard [Computed Columns](https://www.graphile.org/postgraphile/computed-columns/) support in PostGraphile. This plugin effectively adds "computed columns in JavaScript" to your toolbelt. 
8 | 9 | ## Getting Started 10 | 11 | Define your derived fields in the `derivedFieldDefinitions` property of `graphileBuildOptions`: 12 | 13 | ``` js 14 | const express = require("express"); 15 | const { postgraphile } = require("postgraphile"); 16 | const PostGraphileDerivedFieldPlugin = require("postgraphile-plugin-derived-field"); 17 | 18 | const app = express(); 19 | 20 | app.use( 21 | postgraphile(pgConfig, schema, { 22 | graphiql: true, 23 | appendPlugins: [PostGraphileDerivedFieldPlugin], 24 | graphileBuildOptions: { 25 | derivedFieldDefinitions: [ 26 | // your definitions here 27 | ] 28 | } 29 | }) 30 | ); 31 | 32 | app.listen(5000); 33 | ``` 34 | 35 | Provide `derivedFieldDefinitions` as an array of objects with the following structure: 36 | 37 | ``` 38 | { 39 | identifiers: Array, 40 | inflect: function, 41 | resolve: function, 42 | type?: string | build => T, 43 | description?: string 44 | } 45 | ``` 46 | The Scenarios section below provides guidance on structuring `identifiers`, `inflect`, and `resolve` for your specific use case. 47 | 48 | Use `type` to specify the GraphQL type that the `resolve` function returns. This can be a string identifying the GraphQL type name, or a function (with the `build` helper available as the first argument) that returns a GraphQL type. Default: String 49 | 50 | Use `description` to populate the field description in the schema. 51 | 52 | ## Scenarios 53 | 54 | ### Derive a new field from one database column 55 | 56 | ``` js 57 | { 58 | identifiers: [ 59 | { 60 | table: "my_schema.my_table", 61 | columns: ["my_column"], 62 | }, 63 | ], 64 | inflect: fieldName => `derivedFrom${fieldName}`, 65 | resolve: val => `Value derived from ${val}`, 66 | } 67 | ``` 68 | 69 | ### Derive a new field from multiple database columns 70 | 71 | ``` js 72 | { 73 | identifiers: [ 74 | { 75 | table: "my_schema.my_table", 76 | columns: ["my_column", "my_other_column"], 77 | }, 78 | ], 79 | inflect: (...fieldNames) => 80 | `derivedFrom${fieldNames.map(upperFirst).join("And")}`, 81 | resolve: (my_column, my_other_column) => 82 | `Value derived from ${my_column} and ${my_other_column}`, 83 | } 84 | ``` 85 | 86 | ### Derive a new field from columns that have a specific tag 87 | 88 | This approach uses the "smart comments" feature of PostGraphile to match columns that have been assigned a specific tag. 89 | 90 | ``` js 91 | { 92 | identifiers: [ 93 | { 94 | tag: "mytag", 95 | }, 96 | ], 97 | inflect: fieldName => `derivedFrom${fieldName}`, 98 | resolve: val => `Value derived from ${val}`, 99 | } 100 | ``` 101 | 102 | 105 | 106 | ## Alternative syntax 107 | 108 | Column names can be provided as a string: `"my_schema.my_table.my_column"`. This syntax does not support generating a derived field from multiple database columns. 109 | 110 | Tag names can be provided as a string: `"@mytag"` 111 | 112 | ## Examples 113 | 114 |
115 | 116 | Generate pre-signed URLs for client-side S3 GET requests 117 | 118 | ``` js 119 | const express = require("express"); 120 | const { postgraphile } = require("postgraphile"); 121 | const PostGraphileDerivedFieldPlugin = require("postgraphile-plugin-derived-field"); 122 | 123 | const AWS = require("aws-sdk"); 124 | const s3 = new AWS.S3(); 125 | const bucket = "postgraphile-plugin-test"; 126 | 127 | const app = express(); 128 | 129 | app.use( 130 | postgraphile(pgConfig, schema, { 131 | graphiql: true, 132 | appendPlugins: [PostGraphileDerivedFieldPlugin], 133 | graphileBuildOptions: { 134 | derivedFieldDefinitions: [ 135 | { 136 | identifiers: ["my_schema.my_table.my_column"], 137 | inflect: fieldName => `${fieldName}SignedUrl`, 138 | resolve: val => s3.getSignedUrl('getObject', {Bucket: bucket, Key: val, Expires: 900}) 139 | } 140 | ] 141 | } 142 | }) 143 | ); 144 | 145 | app.listen(5000); 146 | ``` 147 | 148 |
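Expose a boolean derived field with an explicit return type and description. This is a minimal sketch (the `my_schema.my_table.my_column` identifier and the `hasMyColumn` field name are placeholders), modelled on the `hasName` definition in this repo's test fixtures (`__tests__/integration/derivedFieldDefinitions.js`); it relies only on the documented `type` and `description` options:

``` js
{
  identifiers: ["my_schema.my_table.my_column"],
  inflect: () => "hasMyColumn",
  // Treat the column as "present" when it is a non-empty string
  resolve: val => typeof val === "string" && val !== "",
  type: "Boolean",
  description: "True if my_column is a non-empty string"
}
```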
149 | -------------------------------------------------------------------------------- /__tests__/fixtures/queries/derived-field.graphql: -------------------------------------------------------------------------------- 1 | query { 2 | oneColumn_derivedField: allPeople { nodes { nameInitials } } 3 | oneColumn_derivedField_sourceField: allPeople { nodes { nameInitials, name } } 4 | oneColumn_sourceField: allPeople { nodes { name } } 5 | twoColumn_derivedField: allPeople { nodes { combinedNameAndEmail } } 6 | twoColumn_derivedField_sourceField1: allPeople { nodes { combinedNameAndEmail, name } } 7 | twoColumn_derivedField_sourceField1_sourceField2: allPeople { nodes { combinedNameAndEmail, name, email } } 8 | twoColumn_sourceField1_sourceField2: allPeople { nodes { name, email } } 9 | withReturnTypeBoolean: allPeople { nodes { hasName } } 10 | withReturnTypeComposite: allPeople { nodes { name, menuExtrasWithDefaults { menuExtra, isEnabled } } } 11 | withTaggedColumn: allPeople { nodes { name, email, avatarUrl } } 12 | } 13 | -------------------------------------------------------------------------------- /__tests__/helpers.js: -------------------------------------------------------------------------------- 1 | const pg = require("pg"); 2 | const { readFile } = require("fs"); 3 | const pgConnectionString = require("pg-connection-string"); 4 | 5 | // This test suite can be flaky. Increase it’s timeout. 6 | jest.setTimeout(1000 * 20); 7 | 8 | function readFilePromise(filename, encoding) { 9 | return new Promise((resolve, reject) => { 10 | readFile(filename, encoding, (err, res) => { 11 | if (err) reject(err); 12 | else resolve(res); 13 | }); 14 | }); 15 | } 16 | 17 | const withPgClient = async (url, fn) => { 18 | if (!fn) { 19 | fn = url; 20 | url = process.env.TEST_DATABASE_URL; 21 | } 22 | const pgPool = new pg.Pool(pgConnectionString.parse(url)); 23 | let client; 24 | try { 25 | client = await pgPool.connect(); 26 | await client.query("begin"); 27 | await client.query("set local timezone to '+04:00'"); 28 | const result = await fn(client); 29 | await client.query("rollback"); 30 | return result; 31 | } finally { 32 | try { 33 | await client.release(); 34 | } catch (e) { 35 | console.error("Error releasing pgClient", e); // eslint-disable-line no-console 36 | } 37 | await pgPool.end(); 38 | } 39 | }; 40 | 41 | const withDbFromUrl = async (url, fn) => { 42 | return withPgClient(url, async client => { 43 | try { 44 | await client.query("BEGIN ISOLATION LEVEL SERIALIZABLE;"); 45 | return fn(client); 46 | } finally { 47 | await client.query("COMMIT;"); 48 | } 49 | }); 50 | }; 51 | 52 | const withRootDb = fn => withDbFromUrl(process.env.TEST_DATABASE_URL, fn); 53 | 54 | let prepopulatedDBKeepalive; 55 | 56 | const populateDatabase = async client => { 57 | await client.query( 58 | await readFilePromise(`${__dirname}/plugin-test-data.sql`, "utf8") 59 | ); 60 | return {}; 61 | }; 62 | 63 | const withPrepopulatedDb = async fn => { 64 | if (!prepopulatedDBKeepalive) { 65 | throw new Error("You must call setup and teardown to use this"); 66 | } 67 | const { client, vars } = prepopulatedDBKeepalive; 68 | if (!vars) { 69 | throw new Error("No prepopulated vars"); 70 | } 71 | let err; 72 | try { 73 | await fn(client, vars); 74 | } catch (e) { 75 | err = e; 76 | } 77 | try { 78 | await client.query("ROLLBACK TO SAVEPOINT pristine;"); 79 | } catch (e) { 80 | err = err || e; 81 | console.error("ERROR ROLLING BACK", e.message); // eslint-disable-line no-console 82 | } 83 | if (err) { 84 | throw err; 85 | } 86 | 
}; 87 | 88 | withPrepopulatedDb.setup = done => { 89 | if (prepopulatedDBKeepalive) { 90 | throw new Error("There's already a prepopulated DB running"); 91 | } 92 | let res; 93 | let rej; 94 | prepopulatedDBKeepalive = new Promise((resolve, reject) => { 95 | res = resolve; 96 | rej = reject; 97 | }); 98 | prepopulatedDBKeepalive.resolve = res; 99 | prepopulatedDBKeepalive.reject = rej; 100 | withRootDb(async client => { 101 | prepopulatedDBKeepalive.client = client; 102 | try { 103 | prepopulatedDBKeepalive.vars = await populateDatabase(client); 104 | } catch (e) { 105 | console.error("FAILED TO PREPOPULATE DB!", e.message); // eslint-disable-line no-console 106 | return done(e); 107 | } 108 | await client.query("SAVEPOINT pristine;"); 109 | done(); 110 | return prepopulatedDBKeepalive; 111 | }); 112 | }; 113 | 114 | withPrepopulatedDb.teardown = () => { 115 | if (!prepopulatedDBKeepalive) { 116 | throw new Error("Cannot tear down null!"); 117 | } 118 | prepopulatedDBKeepalive.resolve(); // Release DB transaction 119 | prepopulatedDBKeepalive = null; 120 | }; 121 | 122 | exports.withRootDb = withRootDb; 123 | exports.withPrepopulatedDb = withPrepopulatedDb; 124 | exports.withPgClient = withPgClient; 125 | -------------------------------------------------------------------------------- /__tests__/integration/__snapshots__/queries.test.js.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`derived-field.graphql 1`] = ` 4 | Object { 5 | "data": Object { 6 | "oneColumn_derivedField": Object { 7 | "nodes": Array [ 8 | Object { 9 | "nameInitials": "JS", 10 | }, 11 | Object { 12 | "nameInitials": "SS", 13 | }, 14 | ], 15 | }, 16 | "oneColumn_derivedField_sourceField": Object { 17 | "nodes": Array [ 18 | Object { 19 | "name": "John Smith", 20 | "nameInitials": "JS", 21 | }, 22 | Object { 23 | "name": "Sara Smith", 24 | "nameInitials": "SS", 25 | }, 26 | ], 27 | }, 28 | "oneColumn_sourceField": Object { 29 | "nodes": Array [ 30 | Object { 31 | "name": "John Smith", 32 | }, 33 | Object { 34 | "name": "Sara Smith", 35 | }, 36 | ], 37 | }, 38 | "twoColumn_derivedField": Object { 39 | "nodes": Array [ 40 | Object { 41 | "combinedNameAndEmail": "John Smith (john.smith@email.com)", 42 | }, 43 | Object { 44 | "combinedNameAndEmail": "Sara Smith (sara.smith@email.com)", 45 | }, 46 | ], 47 | }, 48 | "twoColumn_derivedField_sourceField1": Object { 49 | "nodes": Array [ 50 | Object { 51 | "combinedNameAndEmail": "John Smith (john.smith@email.com)", 52 | "name": "John Smith", 53 | }, 54 | Object { 55 | "combinedNameAndEmail": "Sara Smith (sara.smith@email.com)", 56 | "name": "Sara Smith", 57 | }, 58 | ], 59 | }, 60 | "twoColumn_derivedField_sourceField1_sourceField2": Object { 61 | "nodes": Array [ 62 | Object { 63 | "combinedNameAndEmail": "John Smith (john.smith@email.com)", 64 | "email": "john.smith@email.com", 65 | "name": "John Smith", 66 | }, 67 | Object { 68 | "combinedNameAndEmail": "Sara Smith (sara.smith@email.com)", 69 | "email": "sara.smith@email.com", 70 | "name": "Sara Smith", 71 | }, 72 | ], 73 | }, 74 | "twoColumn_sourceField1_sourceField2": Object { 75 | "nodes": Array [ 76 | Object { 77 | "email": "john.smith@email.com", 78 | "name": "John Smith", 79 | }, 80 | Object { 81 | "email": "sara.smith@email.com", 82 | "name": "Sara Smith", 83 | }, 84 | ], 85 | }, 86 | "withReturnTypeBoolean": Object { 87 | "nodes": Array [ 88 | Object { 89 | "hasName": true, 90 | }, 91 | Object { 92 | "hasName": true, 
93 | }, 94 | ], 95 | }, 96 | "withReturnTypeComposite": Object { 97 | "nodes": Array [ 98 | Object { 99 | "menuExtrasWithDefaults": Array [], 100 | "name": "John Smith", 101 | }, 102 | Object { 103 | "menuExtrasWithDefaults": Array [], 104 | "name": "Sara Smith", 105 | }, 106 | ], 107 | }, 108 | "withTaggedColumn": Object { 109 | "nodes": Array [ 110 | Object { 111 | "avatarUrl": "https://example.com/15781368", 112 | "email": "john.smith@email.com", 113 | "name": "John Smith", 114 | }, 115 | Object { 116 | "avatarUrl": "https://example.com/19883378", 117 | "email": "sara.smith@email.com", 118 | "name": "Sara Smith", 119 | }, 120 | ], 121 | }, 122 | }, 123 | } 124 | `; 125 | -------------------------------------------------------------------------------- /__tests__/integration/__snapshots__/schema.test.js.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`prints a schema with the plugin and some derived field definitions 1`] = ` 4 | "\\"\\"\\"All input for the create \`Person\` mutation.\\"\\"\\" 5 | input CreatePersonInput { 6 | \\"\\"\\" 7 | An arbitrary string value with no semantic meaning. Will be included in the 8 | payload verbatim. May be used to track mutations by the client. 9 | \\"\\"\\" 10 | clientMutationId: String 11 | 12 | \\"\\"\\"The \`Person\` to be created by this mutation.\\"\\"\\" 13 | person: PersonInput! 14 | } 15 | 16 | \\"\\"\\"The output of our create \`Person\` mutation.\\"\\"\\" 17 | type CreatePersonPayload { 18 | \\"\\"\\" 19 | The exact same \`clientMutationId\` that was provided in the mutation input, 20 | unchanged and unused. May be used by a client to track mutations. 21 | \\"\\"\\" 22 | clientMutationId: String 23 | 24 | \\"\\"\\"The \`Person\` that was created by this mutation.\\"\\"\\" 25 | person: Person 26 | 27 | \\"\\"\\"An edge for our \`Person\`. May be used by Relay 1.\\"\\"\\" 28 | personEdge( 29 | \\"\\"\\"The method to use when ordering \`Person\`.\\"\\"\\" 30 | orderBy: [PeopleOrderBy!] = [PRIMARY_KEY_ASC] 31 | ): PeopleEdge 32 | 33 | \\"\\"\\" 34 | Our root query field type. Allows us to run any query from our mutation payload. 35 | \\"\\"\\" 36 | query: Query 37 | } 38 | 39 | \\"\\"\\"A location in a connection that can be used for resuming pagination.\\"\\"\\" 40 | scalar Cursor 41 | 42 | \\"\\"\\"All input for the \`deletePersonById\` mutation.\\"\\"\\" 43 | input DeletePersonByIdInput { 44 | \\"\\"\\" 45 | An arbitrary string value with no semantic meaning. Will be included in the 46 | payload verbatim. May be used to track mutations by the client. 47 | \\"\\"\\" 48 | clientMutationId: String 49 | id: Int! 50 | } 51 | 52 | \\"\\"\\"All input for the \`deletePerson\` mutation.\\"\\"\\" 53 | input DeletePersonInput { 54 | \\"\\"\\" 55 | An arbitrary string value with no semantic meaning. Will be included in the 56 | payload verbatim. May be used to track mutations by the client. 57 | \\"\\"\\" 58 | clientMutationId: String 59 | 60 | \\"\\"\\" 61 | The globally unique \`ID\` which will identify a single \`Person\` to be deleted. 62 | \\"\\"\\" 63 | nodeId: ID! 64 | } 65 | 66 | \\"\\"\\"The output of our delete \`Person\` mutation.\\"\\"\\" 67 | type DeletePersonPayload { 68 | \\"\\"\\" 69 | The exact same \`clientMutationId\` that was provided in the mutation input, 70 | unchanged and unused. May be used by a client to track mutations. 
71 | \\"\\"\\" 72 | clientMutationId: String 73 | deletedPersonId: ID 74 | 75 | \\"\\"\\"The \`Person\` that was deleted by this mutation.\\"\\"\\" 76 | person: Person 77 | 78 | \\"\\"\\"An edge for our \`Person\`. May be used by Relay 1.\\"\\"\\" 79 | personEdge( 80 | \\"\\"\\"The method to use when ordering \`Person\`.\\"\\"\\" 81 | orderBy: [PeopleOrderBy!] = [PRIMARY_KEY_ASC] 82 | ): PeopleEdge 83 | 84 | \\"\\"\\" 85 | Our root query field type. Allows us to run any query from our mutation payload. 86 | \\"\\"\\" 87 | query: Query 88 | } 89 | 90 | enum MenuExtrasType { 91 | EMAILS 92 | LOCATIONS 93 | LOGO 94 | PHONES 95 | SEARCH 96 | } 97 | 98 | type MenuExtrasWithDefault { 99 | isEnabled: Boolean 100 | menuExtra: MenuExtrasType 101 | } 102 | 103 | \\"\\"\\" 104 | The root mutation type which contains root level fields which mutate data. 105 | \\"\\"\\" 106 | type Mutation { 107 | \\"\\"\\"Creates a single \`Person\`.\\"\\"\\" 108 | createPerson( 109 | \\"\\"\\" 110 | The exclusive input argument for this mutation. An object type, make sure to see documentation for this object’s fields. 111 | \\"\\"\\" 112 | input: CreatePersonInput! 113 | ): CreatePersonPayload 114 | 115 | \\"\\"\\"Deletes a single \`Person\` using its globally unique id.\\"\\"\\" 116 | deletePerson( 117 | \\"\\"\\" 118 | The exclusive input argument for this mutation. An object type, make sure to see documentation for this object’s fields. 119 | \\"\\"\\" 120 | input: DeletePersonInput! 121 | ): DeletePersonPayload 122 | 123 | \\"\\"\\"Deletes a single \`Person\` using a unique key.\\"\\"\\" 124 | deletePersonById( 125 | \\"\\"\\" 126 | The exclusive input argument for this mutation. An object type, make sure to see documentation for this object’s fields. 127 | \\"\\"\\" 128 | input: DeletePersonByIdInput! 129 | ): DeletePersonPayload 130 | 131 | \\"\\"\\"Updates a single \`Person\` using its globally unique id and a patch.\\"\\"\\" 132 | updatePerson( 133 | \\"\\"\\" 134 | The exclusive input argument for this mutation. An object type, make sure to see documentation for this object’s fields. 135 | \\"\\"\\" 136 | input: UpdatePersonInput! 137 | ): UpdatePersonPayload 138 | 139 | \\"\\"\\"Updates a single \`Person\` using a unique key and a patch.\\"\\"\\" 140 | updatePersonById( 141 | \\"\\"\\" 142 | The exclusive input argument for this mutation. An object type, make sure to see documentation for this object’s fields. 143 | \\"\\"\\" 144 | input: UpdatePersonByIdInput! 145 | ): UpdatePersonPayload 146 | } 147 | 148 | \\"\\"\\"An object with a globally unique \`ID\`.\\"\\"\\" 149 | interface Node { 150 | \\"\\"\\" 151 | A globally unique identifier. Can be used in various places throughout the system to identify this single value. 152 | \\"\\"\\" 153 | nodeId: ID! 154 | } 155 | 156 | \\"\\"\\"Information about pagination in a connection.\\"\\"\\" 157 | type PageInfo { 158 | \\"\\"\\"When paginating forwards, the cursor to continue.\\"\\"\\" 159 | endCursor: Cursor 160 | 161 | \\"\\"\\"When paginating forwards, are there more items?\\"\\"\\" 162 | hasNextPage: Boolean! 163 | 164 | \\"\\"\\"When paginating backwards, are there more items?\\"\\"\\" 165 | hasPreviousPage: Boolean! 166 | 167 | \\"\\"\\"When paginating backwards, the cursor to continue.\\"\\"\\" 168 | startCursor: Cursor 169 | } 170 | 171 | \\"\\"\\"A connection to a list of \`Person\` values.\\"\\"\\" 172 | type PeopleConnection { 173 | \\"\\"\\" 174 | A list of edges which contains the \`Person\` and cursor to aid in pagination. 
175 | \\"\\"\\" 176 | edges: [PeopleEdge!]! 177 | 178 | \\"\\"\\"A list of \`Person\` objects.\\"\\"\\" 179 | nodes: [Person]! 180 | 181 | \\"\\"\\"Information to aid in pagination.\\"\\"\\" 182 | pageInfo: PageInfo! 183 | 184 | \\"\\"\\"The count of *all* \`Person\` you could get from the connection.\\"\\"\\" 185 | totalCount: Int 186 | } 187 | 188 | \\"\\"\\"A \`Person\` edge in the connection.\\"\\"\\" 189 | type PeopleEdge { 190 | \\"\\"\\"A cursor for use in pagination.\\"\\"\\" 191 | cursor: Cursor 192 | 193 | \\"\\"\\"The \`Person\` at the end of the edge.\\"\\"\\" 194 | node: Person 195 | } 196 | 197 | \\"\\"\\"Methods to use when ordering \`Person\`.\\"\\"\\" 198 | enum PeopleOrderBy { 199 | AVATAR_KEY_ASC 200 | AVATAR_KEY_DESC 201 | EMAIL_ASC 202 | EMAIL_DESC 203 | ID_ASC 204 | ID_DESC 205 | MENU_EXTRAS_ASC 206 | MENU_EXTRAS_DESC 207 | NAME_ASC 208 | NAME_DESC 209 | NATURAL 210 | PRIMARY_KEY_ASC 211 | PRIMARY_KEY_DESC 212 | } 213 | 214 | type Person implements Node { 215 | avatarKey: String 216 | avatarUrl: String 217 | 218 | \\"\\"\\"The person’s name and email\\"\\"\\" 219 | combinedNameAndEmail: String 220 | 221 | \\"\\"\\"The person’s email\\"\\"\\" 222 | email: String! 223 | hasName: Boolean 224 | id: Int! 225 | menuExtras: [MenuExtrasType] 226 | menuExtrasWithDefaults: [MenuExtrasWithDefault] 227 | 228 | \\"\\"\\"The person’s name\\"\\"\\" 229 | name: String! 230 | 231 | \\"\\"\\"The person’s initials\\"\\"\\" 232 | nameInitials: String 233 | 234 | \\"\\"\\" 235 | A globally unique identifier. Can be used in various places throughout the system to identify this single value. 236 | \\"\\"\\" 237 | nodeId: ID! 238 | } 239 | 240 | \\"\\"\\" 241 | A condition to be used against \`Person\` object types. All fields are tested for equality and combined with a logical ‘and.’ 242 | \\"\\"\\" 243 | input PersonCondition { 244 | \\"\\"\\"Checks for equality with the object’s \`avatarKey\` field.\\"\\"\\" 245 | avatarKey: String 246 | 247 | \\"\\"\\"Checks for equality with the object’s \`email\` field.\\"\\"\\" 248 | email: String 249 | 250 | \\"\\"\\"Checks for equality with the object’s \`id\` field.\\"\\"\\" 251 | id: Int 252 | 253 | \\"\\"\\"Checks for equality with the object’s \`menuExtras\` field.\\"\\"\\" 254 | menuExtras: [MenuExtrasType] 255 | 256 | \\"\\"\\"Checks for equality with the object’s \`name\` field.\\"\\"\\" 257 | name: String 258 | } 259 | 260 | \\"\\"\\"An input for mutations affecting \`Person\`\\"\\"\\" 261 | input PersonInput { 262 | avatarKey: String 263 | 264 | \\"\\"\\"The person’s email\\"\\"\\" 265 | email: String! 266 | id: Int 267 | menuExtras: [MenuExtrasType] 268 | 269 | \\"\\"\\"The person’s name\\"\\"\\" 270 | name: String! 271 | } 272 | 273 | \\"\\"\\" 274 | Represents an update to a \`Person\`. Fields that are set will be updated. 
275 | \\"\\"\\" 276 | input PersonPatch { 277 | avatarKey: String 278 | 279 | \\"\\"\\"The person’s email\\"\\"\\" 280 | email: String 281 | id: Int 282 | menuExtras: [MenuExtrasType] 283 | 284 | \\"\\"\\"The person’s name\\"\\"\\" 285 | name: String 286 | } 287 | 288 | \\"\\"\\"The root query type which gives access points into the data universe.\\"\\"\\" 289 | type Query implements Node { 290 | \\"\\"\\"Reads and enables pagination through a set of \`Person\`.\\"\\"\\" 291 | allPeople( 292 | \\"\\"\\"Read all values in the set after (below) this cursor.\\"\\"\\" 293 | after: Cursor 294 | 295 | \\"\\"\\"Read all values in the set before (above) this cursor.\\"\\"\\" 296 | before: Cursor 297 | 298 | \\"\\"\\" 299 | A condition to be used in determining which values should be returned by the collection. 300 | \\"\\"\\" 301 | condition: PersonCondition 302 | 303 | \\"\\"\\"Only read the first \`n\` values of the set.\\"\\"\\" 304 | first: Int 305 | 306 | \\"\\"\\"Only read the last \`n\` values of the set.\\"\\"\\" 307 | last: Int 308 | 309 | \\"\\"\\" 310 | Skip the first \`n\` values from our \`after\` cursor, an alternative to cursor 311 | based pagination. May not be used with \`last\`. 312 | \\"\\"\\" 313 | offset: Int 314 | 315 | \\"\\"\\"The method to use when ordering \`Person\`.\\"\\"\\" 316 | orderBy: [PeopleOrderBy!] = [PRIMARY_KEY_ASC] 317 | ): PeopleConnection 318 | 319 | \\"\\"\\"Fetches an object given its globally unique \`ID\`.\\"\\"\\" 320 | node( 321 | \\"\\"\\"The globally unique \`ID\`.\\"\\"\\" 322 | nodeId: ID! 323 | ): Node 324 | 325 | \\"\\"\\" 326 | The root query type must be a \`Node\` to work well with Relay 1 mutations. This just resolves to \`query\`. 327 | \\"\\"\\" 328 | nodeId: ID! 329 | 330 | \\"\\"\\"Reads a single \`Person\` using its globally unique \`ID\`.\\"\\"\\" 331 | person( 332 | \\"\\"\\"The globally unique \`ID\` to be used in selecting a single \`Person\`.\\"\\"\\" 333 | nodeId: ID! 334 | ): Person 335 | personById(id: Int!): Person 336 | 337 | \\"\\"\\" 338 | Exposes the root query type nested one level down. This is helpful for Relay 1 339 | which can only query top level fields if they are in a particular form. 340 | \\"\\"\\" 341 | query: Query! 342 | } 343 | 344 | \\"\\"\\"All input for the \`updatePersonById\` mutation.\\"\\"\\" 345 | input UpdatePersonByIdInput { 346 | \\"\\"\\" 347 | An arbitrary string value with no semantic meaning. Will be included in the 348 | payload verbatim. May be used to track mutations by the client. 349 | \\"\\"\\" 350 | clientMutationId: String 351 | id: Int! 352 | 353 | \\"\\"\\" 354 | An object where the defined keys will be set on the \`Person\` being updated. 355 | \\"\\"\\" 356 | personPatch: PersonPatch! 357 | } 358 | 359 | \\"\\"\\"All input for the \`updatePerson\` mutation.\\"\\"\\" 360 | input UpdatePersonInput { 361 | \\"\\"\\" 362 | An arbitrary string value with no semantic meaning. Will be included in the 363 | payload verbatim. May be used to track mutations by the client. 364 | \\"\\"\\" 365 | clientMutationId: String 366 | 367 | \\"\\"\\" 368 | The globally unique \`ID\` which will identify a single \`Person\` to be updated. 369 | \\"\\"\\" 370 | nodeId: ID! 371 | 372 | \\"\\"\\" 373 | An object where the defined keys will be set on the \`Person\` being updated. 374 | \\"\\"\\" 375 | personPatch: PersonPatch! 
376 | } 377 | 378 | \\"\\"\\"The output of our update \`Person\` mutation.\\"\\"\\" 379 | type UpdatePersonPayload { 380 | \\"\\"\\" 381 | The exact same \`clientMutationId\` that was provided in the mutation input, 382 | unchanged and unused. May be used by a client to track mutations. 383 | \\"\\"\\" 384 | clientMutationId: String 385 | 386 | \\"\\"\\"The \`Person\` that was updated by this mutation.\\"\\"\\" 387 | person: Person 388 | 389 | \\"\\"\\"An edge for our \`Person\`. May be used by Relay 1.\\"\\"\\" 390 | personEdge( 391 | \\"\\"\\"The method to use when ordering \`Person\`.\\"\\"\\" 392 | orderBy: [PeopleOrderBy!] = [PRIMARY_KEY_ASC] 393 | ): PeopleEdge 394 | 395 | \\"\\"\\" 396 | Our root query field type. Allows us to run any query from our mutation payload. 397 | \\"\\"\\" 398 | query: Query 399 | } 400 | " 401 | `; 402 | 403 | exports[`prints a schema without the plugin 1`] = ` 404 | "\\"\\"\\"All input for the create \`Person\` mutation.\\"\\"\\" 405 | input CreatePersonInput { 406 | \\"\\"\\" 407 | An arbitrary string value with no semantic meaning. Will be included in the 408 | payload verbatim. May be used to track mutations by the client. 409 | \\"\\"\\" 410 | clientMutationId: String 411 | 412 | \\"\\"\\"The \`Person\` to be created by this mutation.\\"\\"\\" 413 | person: PersonInput! 414 | } 415 | 416 | \\"\\"\\"The output of our create \`Person\` mutation.\\"\\"\\" 417 | type CreatePersonPayload { 418 | \\"\\"\\" 419 | The exact same \`clientMutationId\` that was provided in the mutation input, 420 | unchanged and unused. May be used by a client to track mutations. 421 | \\"\\"\\" 422 | clientMutationId: String 423 | 424 | \\"\\"\\"The \`Person\` that was created by this mutation.\\"\\"\\" 425 | person: Person 426 | 427 | \\"\\"\\"An edge for our \`Person\`. May be used by Relay 1.\\"\\"\\" 428 | personEdge( 429 | \\"\\"\\"The method to use when ordering \`Person\`.\\"\\"\\" 430 | orderBy: [PeopleOrderBy!] = [PRIMARY_KEY_ASC] 431 | ): PeopleEdge 432 | 433 | \\"\\"\\" 434 | Our root query field type. Allows us to run any query from our mutation payload. 435 | \\"\\"\\" 436 | query: Query 437 | } 438 | 439 | \\"\\"\\"A location in a connection that can be used for resuming pagination.\\"\\"\\" 440 | scalar Cursor 441 | 442 | \\"\\"\\"All input for the \`deletePersonById\` mutation.\\"\\"\\" 443 | input DeletePersonByIdInput { 444 | \\"\\"\\" 445 | An arbitrary string value with no semantic meaning. Will be included in the 446 | payload verbatim. May be used to track mutations by the client. 447 | \\"\\"\\" 448 | clientMutationId: String 449 | id: Int! 450 | } 451 | 452 | \\"\\"\\"All input for the \`deletePerson\` mutation.\\"\\"\\" 453 | input DeletePersonInput { 454 | \\"\\"\\" 455 | An arbitrary string value with no semantic meaning. Will be included in the 456 | payload verbatim. May be used to track mutations by the client. 457 | \\"\\"\\" 458 | clientMutationId: String 459 | 460 | \\"\\"\\" 461 | The globally unique \`ID\` which will identify a single \`Person\` to be deleted. 462 | \\"\\"\\" 463 | nodeId: ID! 464 | } 465 | 466 | \\"\\"\\"The output of our delete \`Person\` mutation.\\"\\"\\" 467 | type DeletePersonPayload { 468 | \\"\\"\\" 469 | The exact same \`clientMutationId\` that was provided in the mutation input, 470 | unchanged and unused. May be used by a client to track mutations. 
471 | \\"\\"\\" 472 | clientMutationId: String 473 | deletedPersonId: ID 474 | 475 | \\"\\"\\"The \`Person\` that was deleted by this mutation.\\"\\"\\" 476 | person: Person 477 | 478 | \\"\\"\\"An edge for our \`Person\`. May be used by Relay 1.\\"\\"\\" 479 | personEdge( 480 | \\"\\"\\"The method to use when ordering \`Person\`.\\"\\"\\" 481 | orderBy: [PeopleOrderBy!] = [PRIMARY_KEY_ASC] 482 | ): PeopleEdge 483 | 484 | \\"\\"\\" 485 | Our root query field type. Allows us to run any query from our mutation payload. 486 | \\"\\"\\" 487 | query: Query 488 | } 489 | 490 | enum MenuExtrasType { 491 | EMAILS 492 | LOCATIONS 493 | LOGO 494 | PHONES 495 | SEARCH 496 | } 497 | 498 | \\"\\"\\" 499 | The root mutation type which contains root level fields which mutate data. 500 | \\"\\"\\" 501 | type Mutation { 502 | \\"\\"\\"Creates a single \`Person\`.\\"\\"\\" 503 | createPerson( 504 | \\"\\"\\" 505 | The exclusive input argument for this mutation. An object type, make sure to see documentation for this object’s fields. 506 | \\"\\"\\" 507 | input: CreatePersonInput! 508 | ): CreatePersonPayload 509 | 510 | \\"\\"\\"Deletes a single \`Person\` using its globally unique id.\\"\\"\\" 511 | deletePerson( 512 | \\"\\"\\" 513 | The exclusive input argument for this mutation. An object type, make sure to see documentation for this object’s fields. 514 | \\"\\"\\" 515 | input: DeletePersonInput! 516 | ): DeletePersonPayload 517 | 518 | \\"\\"\\"Deletes a single \`Person\` using a unique key.\\"\\"\\" 519 | deletePersonById( 520 | \\"\\"\\" 521 | The exclusive input argument for this mutation. An object type, make sure to see documentation for this object’s fields. 522 | \\"\\"\\" 523 | input: DeletePersonByIdInput! 524 | ): DeletePersonPayload 525 | 526 | \\"\\"\\"Updates a single \`Person\` using its globally unique id and a patch.\\"\\"\\" 527 | updatePerson( 528 | \\"\\"\\" 529 | The exclusive input argument for this mutation. An object type, make sure to see documentation for this object’s fields. 530 | \\"\\"\\" 531 | input: UpdatePersonInput! 532 | ): UpdatePersonPayload 533 | 534 | \\"\\"\\"Updates a single \`Person\` using a unique key and a patch.\\"\\"\\" 535 | updatePersonById( 536 | \\"\\"\\" 537 | The exclusive input argument for this mutation. An object type, make sure to see documentation for this object’s fields. 538 | \\"\\"\\" 539 | input: UpdatePersonByIdInput! 540 | ): UpdatePersonPayload 541 | } 542 | 543 | \\"\\"\\"An object with a globally unique \`ID\`.\\"\\"\\" 544 | interface Node { 545 | \\"\\"\\" 546 | A globally unique identifier. Can be used in various places throughout the system to identify this single value. 547 | \\"\\"\\" 548 | nodeId: ID! 549 | } 550 | 551 | \\"\\"\\"Information about pagination in a connection.\\"\\"\\" 552 | type PageInfo { 553 | \\"\\"\\"When paginating forwards, the cursor to continue.\\"\\"\\" 554 | endCursor: Cursor 555 | 556 | \\"\\"\\"When paginating forwards, are there more items?\\"\\"\\" 557 | hasNextPage: Boolean! 558 | 559 | \\"\\"\\"When paginating backwards, are there more items?\\"\\"\\" 560 | hasPreviousPage: Boolean! 561 | 562 | \\"\\"\\"When paginating backwards, the cursor to continue.\\"\\"\\" 563 | startCursor: Cursor 564 | } 565 | 566 | \\"\\"\\"A connection to a list of \`Person\` values.\\"\\"\\" 567 | type PeopleConnection { 568 | \\"\\"\\" 569 | A list of edges which contains the \`Person\` and cursor to aid in pagination. 570 | \\"\\"\\" 571 | edges: [PeopleEdge!]! 
572 | 573 | \\"\\"\\"A list of \`Person\` objects.\\"\\"\\" 574 | nodes: [Person]! 575 | 576 | \\"\\"\\"Information to aid in pagination.\\"\\"\\" 577 | pageInfo: PageInfo! 578 | 579 | \\"\\"\\"The count of *all* \`Person\` you could get from the connection.\\"\\"\\" 580 | totalCount: Int 581 | } 582 | 583 | \\"\\"\\"A \`Person\` edge in the connection.\\"\\"\\" 584 | type PeopleEdge { 585 | \\"\\"\\"A cursor for use in pagination.\\"\\"\\" 586 | cursor: Cursor 587 | 588 | \\"\\"\\"The \`Person\` at the end of the edge.\\"\\"\\" 589 | node: Person 590 | } 591 | 592 | \\"\\"\\"Methods to use when ordering \`Person\`.\\"\\"\\" 593 | enum PeopleOrderBy { 594 | AVATAR_KEY_ASC 595 | AVATAR_KEY_DESC 596 | EMAIL_ASC 597 | EMAIL_DESC 598 | ID_ASC 599 | ID_DESC 600 | MENU_EXTRAS_ASC 601 | MENU_EXTRAS_DESC 602 | NAME_ASC 603 | NAME_DESC 604 | NATURAL 605 | PRIMARY_KEY_ASC 606 | PRIMARY_KEY_DESC 607 | } 608 | 609 | type Person implements Node { 610 | avatarKey: String 611 | 612 | \\"\\"\\"The person’s email\\"\\"\\" 613 | email: String! 614 | id: Int! 615 | menuExtras: [MenuExtrasType] 616 | 617 | \\"\\"\\"The person’s name\\"\\"\\" 618 | name: String! 619 | 620 | \\"\\"\\" 621 | A globally unique identifier. Can be used in various places throughout the system to identify this single value. 622 | \\"\\"\\" 623 | nodeId: ID! 624 | } 625 | 626 | \\"\\"\\" 627 | A condition to be used against \`Person\` object types. All fields are tested for equality and combined with a logical ‘and.’ 628 | \\"\\"\\" 629 | input PersonCondition { 630 | \\"\\"\\"Checks for equality with the object’s \`avatarKey\` field.\\"\\"\\" 631 | avatarKey: String 632 | 633 | \\"\\"\\"Checks for equality with the object’s \`email\` field.\\"\\"\\" 634 | email: String 635 | 636 | \\"\\"\\"Checks for equality with the object’s \`id\` field.\\"\\"\\" 637 | id: Int 638 | 639 | \\"\\"\\"Checks for equality with the object’s \`menuExtras\` field.\\"\\"\\" 640 | menuExtras: [MenuExtrasType] 641 | 642 | \\"\\"\\"Checks for equality with the object’s \`name\` field.\\"\\"\\" 643 | name: String 644 | } 645 | 646 | \\"\\"\\"An input for mutations affecting \`Person\`\\"\\"\\" 647 | input PersonInput { 648 | avatarKey: String 649 | 650 | \\"\\"\\"The person’s email\\"\\"\\" 651 | email: String! 652 | id: Int 653 | menuExtras: [MenuExtrasType] 654 | 655 | \\"\\"\\"The person’s name\\"\\"\\" 656 | name: String! 657 | } 658 | 659 | \\"\\"\\" 660 | Represents an update to a \`Person\`. Fields that are set will be updated. 661 | \\"\\"\\" 662 | input PersonPatch { 663 | avatarKey: String 664 | 665 | \\"\\"\\"The person’s email\\"\\"\\" 666 | email: String 667 | id: Int 668 | menuExtras: [MenuExtrasType] 669 | 670 | \\"\\"\\"The person’s name\\"\\"\\" 671 | name: String 672 | } 673 | 674 | \\"\\"\\"The root query type which gives access points into the data universe.\\"\\"\\" 675 | type Query implements Node { 676 | \\"\\"\\"Reads and enables pagination through a set of \`Person\`.\\"\\"\\" 677 | allPeople( 678 | \\"\\"\\"Read all values in the set after (below) this cursor.\\"\\"\\" 679 | after: Cursor 680 | 681 | \\"\\"\\"Read all values in the set before (above) this cursor.\\"\\"\\" 682 | before: Cursor 683 | 684 | \\"\\"\\" 685 | A condition to be used in determining which values should be returned by the collection. 
686 | \\"\\"\\" 687 | condition: PersonCondition 688 | 689 | \\"\\"\\"Only read the first \`n\` values of the set.\\"\\"\\" 690 | first: Int 691 | 692 | \\"\\"\\"Only read the last \`n\` values of the set.\\"\\"\\" 693 | last: Int 694 | 695 | \\"\\"\\" 696 | Skip the first \`n\` values from our \`after\` cursor, an alternative to cursor 697 | based pagination. May not be used with \`last\`. 698 | \\"\\"\\" 699 | offset: Int 700 | 701 | \\"\\"\\"The method to use when ordering \`Person\`.\\"\\"\\" 702 | orderBy: [PeopleOrderBy!] = [PRIMARY_KEY_ASC] 703 | ): PeopleConnection 704 | 705 | \\"\\"\\"Fetches an object given its globally unique \`ID\`.\\"\\"\\" 706 | node( 707 | \\"\\"\\"The globally unique \`ID\`.\\"\\"\\" 708 | nodeId: ID! 709 | ): Node 710 | 711 | \\"\\"\\" 712 | The root query type must be a \`Node\` to work well with Relay 1 mutations. This just resolves to \`query\`. 713 | \\"\\"\\" 714 | nodeId: ID! 715 | 716 | \\"\\"\\"Reads a single \`Person\` using its globally unique \`ID\`.\\"\\"\\" 717 | person( 718 | \\"\\"\\"The globally unique \`ID\` to be used in selecting a single \`Person\`.\\"\\"\\" 719 | nodeId: ID! 720 | ): Person 721 | personById(id: Int!): Person 722 | 723 | \\"\\"\\" 724 | Exposes the root query type nested one level down. This is helpful for Relay 1 725 | which can only query top level fields if they are in a particular form. 726 | \\"\\"\\" 727 | query: Query! 728 | } 729 | 730 | \\"\\"\\"All input for the \`updatePersonById\` mutation.\\"\\"\\" 731 | input UpdatePersonByIdInput { 732 | \\"\\"\\" 733 | An arbitrary string value with no semantic meaning. Will be included in the 734 | payload verbatim. May be used to track mutations by the client. 735 | \\"\\"\\" 736 | clientMutationId: String 737 | id: Int! 738 | 739 | \\"\\"\\" 740 | An object where the defined keys will be set on the \`Person\` being updated. 741 | \\"\\"\\" 742 | personPatch: PersonPatch! 743 | } 744 | 745 | \\"\\"\\"All input for the \`updatePerson\` mutation.\\"\\"\\" 746 | input UpdatePersonInput { 747 | \\"\\"\\" 748 | An arbitrary string value with no semantic meaning. Will be included in the 749 | payload verbatim. May be used to track mutations by the client. 750 | \\"\\"\\" 751 | clientMutationId: String 752 | 753 | \\"\\"\\" 754 | The globally unique \`ID\` which will identify a single \`Person\` to be updated. 755 | \\"\\"\\" 756 | nodeId: ID! 757 | 758 | \\"\\"\\" 759 | An object where the defined keys will be set on the \`Person\` being updated. 760 | \\"\\"\\" 761 | personPatch: PersonPatch! 762 | } 763 | 764 | \\"\\"\\"The output of our update \`Person\` mutation.\\"\\"\\" 765 | type UpdatePersonPayload { 766 | \\"\\"\\" 767 | The exact same \`clientMutationId\` that was provided in the mutation input, 768 | unchanged and unused. May be used by a client to track mutations. 769 | \\"\\"\\" 770 | clientMutationId: String 771 | 772 | \\"\\"\\"The \`Person\` that was updated by this mutation.\\"\\"\\" 773 | person: Person 774 | 775 | \\"\\"\\"An edge for our \`Person\`. May be used by Relay 1.\\"\\"\\" 776 | personEdge( 777 | \\"\\"\\"The method to use when ordering \`Person\`.\\"\\"\\" 778 | orderBy: [PeopleOrderBy!] = [PRIMARY_KEY_ASC] 779 | ): PeopleEdge 780 | 781 | \\"\\"\\" 782 | Our root query field type. Allows us to run any query from our mutation payload. 
783 | \\"\\"\\" 784 | query: Query 785 | } 786 | " 787 | `; 788 | -------------------------------------------------------------------------------- /__tests__/integration/derivedFieldDefinitions.js: -------------------------------------------------------------------------------- 1 | const { upperFirst } = require("graphile-build-pg"); 2 | 3 | module.exports = [ 4 | { 5 | identifiers: ["p.person.name"], 6 | inflect: fieldName => `${fieldName}Initials`, 7 | resolve: name => name.split(" ").reduce((p, c) => p + c.substr(0, 1), ""), 8 | description: "The person’s initials", 9 | }, 10 | { 11 | identifiers: [ 12 | { 13 | table: "p.person", 14 | columns: ["name", "email"], 15 | }, 16 | ], 17 | inflect: (...fieldNames) => 18 | `combined${fieldNames.map(upperFirst).join("And")}`, 19 | resolve: (name, email) => `${name} (${email})`, 20 | description: "The person’s name and email", 21 | }, 22 | { 23 | identifiers: ["p.person.name"], 24 | inflect: () => "hasName", 25 | resolve: name => typeof name === "string" && name !== "", 26 | returnTypeName: "Boolean", 27 | }, 28 | { 29 | identifiers: [ 30 | { 31 | tag: "key", 32 | }, 33 | ], 34 | inflect: fieldName => fieldName.replace("Key", "Url"), 35 | resolve: key => `https://example.com/${key}`, 36 | }, 37 | { 38 | identifiers: [ 39 | { 40 | table: "p.person", 41 | columns: ["menu_extras"], 42 | }, 43 | ], 44 | type: build => { 45 | const { 46 | getTypeByName, 47 | graphql: { GraphQLList }, 48 | } = build; 49 | const compositeTypeName = "MenuExtrasWithDefault"; 50 | const compositeType = getTypeByName(compositeTypeName); 51 | if (!compositeType) 52 | throw new Error(`Could not find composite type '${compositeTypeName}'`); 53 | return new GraphQLList(compositeType); 54 | }, 55 | inflect: fieldName => `${fieldName}WithDefaults`, 56 | resolve: () => { 57 | return []; 58 | }, 59 | }, 60 | ]; 61 | -------------------------------------------------------------------------------- /__tests__/integration/queries.test.js: -------------------------------------------------------------------------------- 1 | const { graphql } = require("graphql"); 2 | const { withPgClient } = require("../helpers"); 3 | const { createPostGraphQLSchema } = require("postgraphile-core"); 4 | const { readdirSync, readFile: rawReadFile } = require("fs"); 5 | const { resolve: resolvePath } = require("path"); 6 | const { printSchema } = require("graphql/utilities"); 7 | const debug = require("debug")("graphile-build:schema"); 8 | 9 | function readFile(filename, encoding) { 10 | return new Promise((resolve, reject) => { 11 | rawReadFile(filename, encoding, (err, res) => { 12 | if (err) reject(err); 13 | else resolve(res); 14 | }); 15 | }); 16 | } 17 | 18 | const queriesDir = `${__dirname}/../fixtures/queries`; 19 | const queryFileNames = readdirSync(queriesDir); 20 | let queryResults = []; 21 | 22 | const pluginTestData = () => 23 | readFile(`${__dirname}/../plugin-test-data.sql`, "utf8"); 24 | 25 | beforeAll(() => { 26 | // Get a few GraphQL schema instance that we can query. 27 | const gqlSchemasPromise = withPgClient(async pgClient => { 28 | // Different fixtures need different schemas with different configurations. 29 | // Make all of the different schemas with different configurations that we 30 | // need and wait for them to be created in parallel. 
31 | const [normal] = await Promise.all([ 32 | createPostGraphQLSchema(pgClient, ["p"], { 33 | appendPlugins: [require("../../index.js")], 34 | graphileBuildOptions: { 35 | derivedFieldDefinitions: require("./derivedFieldDefinitions"), 36 | }, 37 | }), 38 | ]); 39 | debug(printSchema(normal)); 40 | return { 41 | normal, 42 | }; 43 | }); 44 | 45 | // Execute all of the queries in parallel. We will not wait for them to 46 | // resolve or reject. The tests will do that. 47 | // 48 | // All of our queries share a single client instance. 49 | const queryResultsPromise = (async () => { 50 | // Wait for the schema to resolve. We need the schema to be introspected 51 | // before we can do anything else! 52 | const gqlSchemas = await gqlSchemasPromise; 53 | // Get a new Postgres client instance. 54 | return await withPgClient(async pgClient => { 55 | // Add data to the client instance we are using. 56 | await pgClient.query(await pluginTestData()); 57 | // Run all of our queries in parallel. 58 | return await Promise.all( 59 | queryFileNames.map(async fileName => { 60 | // Read the query from the file system. 61 | const query = await readFile( 62 | resolvePath(queriesDir, fileName), 63 | "utf8" 64 | ); 65 | // Get the appropriate GraphQL schema for this fixture. We want to test 66 | // some specific fixtures against a schema configured slightly 67 | // differently. 68 | const gqlSchema = 69 | fileName === "classic-ids.graphql" 70 | ? gqlSchemas.classicIds 71 | : fileName === "dynamic-json.graphql" 72 | ? gqlSchemas.dynamicJson 73 | : gqlSchemas.normal; 74 | // Return the result of our GraphQL query. 75 | const result = await graphql(gqlSchema, query, null, { 76 | pgClient: pgClient, 77 | }); 78 | if (result.errors) { 79 | console.log(result.errors.map(e => e.originalError)); 80 | } 81 | return result; 82 | }) 83 | ); 84 | }); 85 | })(); 86 | 87 | // Flatten out the query results promise. 88 | queryResults = queryFileNames.map(async (_, i) => { 89 | return await (await queryResultsPromise)[i]; 90 | }); 91 | }); 92 | 93 | for (let i = 0; i < queryFileNames.length; i++) { 94 | test(queryFileNames[i], async () => { 95 | expect(await queryResults[i]).toMatchSnapshot(); 96 | }); 97 | } 98 | -------------------------------------------------------------------------------- /__tests__/integration/schema.test.js: -------------------------------------------------------------------------------- 1 | // TODO: There may be some excessive waste, if we could somehow filter what 2 | // these guys see, that would be great 👍 3 | 4 | const printSchemaOrdered = require("../printSchemaOrdered"); 5 | const { withPgClient } = require("../helpers"); 6 | const { createPostGraphQLSchema } = require("postgraphile-core"); 7 | 8 | // This test suite can be flaky. Increase it’s timeout. 
9 | jest.setTimeout(1000 * 20); 10 | 11 | let testResults; 12 | 13 | const testFixtures = [ 14 | { 15 | name: "prints a schema without the plugin", 16 | createSchema: client => createPostGraphQLSchema(client, ["p"], {}), 17 | }, 18 | { 19 | name: "prints a schema with the plugin and some derived field definitions", 20 | createSchema: client => 21 | createPostGraphQLSchema(client, ["p"], { 22 | appendPlugins: [require("../../index.js")], 23 | graphileBuildOptions: { 24 | derivedFieldDefinitions: require("./derivedFieldDefinitions"), 25 | }, 26 | }), 27 | }, 28 | ]; 29 | 30 | beforeAll(() => { 31 | testResults = testFixtures.map(testFixture => 32 | withPgClient(async client => { 33 | return await testFixture.createSchema(client); 34 | }) 35 | ); 36 | }); 37 | 38 | for (let i = 0; i < testFixtures.length; i++) { 39 | test(testFixtures[i].name, async () => { 40 | expect(printSchemaOrdered(await testResults[i])).toMatchSnapshot(); 41 | }); 42 | } 43 | -------------------------------------------------------------------------------- /__tests__/plugin-test-data.sql: -------------------------------------------------------------------------------- 1 | insert into p.person (id, name, email, avatar_key) values 2 | (1, 'John Smith', 'john.smith@email.com', '15781368'), 3 | (2, 'Sara Smith', 'sara.smith@email.com', '19883378'); -------------------------------------------------------------------------------- /__tests__/plugin-test-schema.sql: -------------------------------------------------------------------------------- 1 | drop schema if exists p cascade; 2 | 3 | create schema p; 4 | 5 | create type p.menu_extras_type as enum ( 6 | 'logo', 7 | 'emails', 8 | 'locations', 9 | 'phones', 10 | 'search' 11 | ); 12 | 13 | create type p.menu_extras_with_defaults as ( 14 | menu_extra p.menu_extras_type, 15 | is_enabled boolean 16 | ); 17 | 18 | create table p.person ( 19 | id serial primary key, 20 | name text not null, 21 | email text not null, 22 | avatar_key text, 23 | menu_extras p.menu_extras_type[] 24 | ); 25 | 26 | comment on column p.person.name is 'The person’s name'; 27 | comment on column p.person.email is 'The person’s email'; 28 | comment on column p.person.avatar_key is '@key'; 29 | -------------------------------------------------------------------------------- /__tests__/printSchemaOrdered.js: -------------------------------------------------------------------------------- 1 | const { parse, buildASTSchema } = require("graphql"); 2 | const { printSchema } = require("graphql/utilities"); 3 | 4 | module.exports = function printSchemaOrdered(originalSchema) { 5 | // Clone schema so we don't damage anything 6 | const schema = buildASTSchema(parse(printSchema(originalSchema))); 7 | 8 | const typeMap = schema.getTypeMap(); 9 | Object.keys(typeMap).forEach(name => { 10 | const gqlType = typeMap[name]; 11 | 12 | // Object? 13 | if (gqlType.getFields) { 14 | const fields = gqlType.getFields(); 15 | const keys = Object.keys(fields).sort(); 16 | keys.forEach(key => { 17 | const value = fields[key]; 18 | 19 | // Move the key to the end of the object 20 | delete fields[key]; 21 | fields[key] = value; 22 | 23 | // Sort args 24 | if (value.args) { 25 | value.args.sort((a, b) => a.name.localeCompare(b.name)); 26 | } 27 | }); 28 | } 29 | 30 | // Enum? 
31 | if (gqlType.getValues) { 32 | gqlType.getValues().sort((a, b) => a.name.localeCompare(b.name)); 33 | } 34 | }); 35 | 36 | return printSchema(schema); 37 | }; 38 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | module.exports = function PostGraphileDerivedFieldPlugin(builder, options) { 2 | require("./src/DerivedFieldPlugin.js")(builder, options); 3 | }; 4 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "postgraphile-plugin-derived-field", 3 | "version": "1.0.0-alpha.7", 4 | "description": "Add derived fields in PostGraphile", 5 | "main": "index.js", 6 | "scripts": { 7 | "lint": "eslint .", 8 | "test": "scripts/test" 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "git+https://github.com/mattbretl/postgraphile-plugin-derived-field.git" 13 | }, 14 | "author": "Matt Bretl", 15 | "license": "MIT", 16 | "bugs": { 17 | "url": "https://github.com/mattbretl/postgraphile-plugin-derived-field/issues" 18 | }, 19 | "devDependencies": { 20 | "eslint": "^6.8.0", 21 | "eslint-config-prettier": "^6.11.0", 22 | "eslint-plugin-jest": "^23.8.2", 23 | "eslint-plugin-prettier": "^3.1.3", 24 | "graphql": "^14.6.0", 25 | "jest": "^25.5.0", 26 | "pg": "^7.18.2", 27 | "postgraphile-core": "4.0.0", 28 | "prettier": "1.19.1" 29 | }, 30 | "jest": { 31 | "testRegex": "__tests__/.*\\.test\\.js$" 32 | }, 33 | "files": [ 34 | "src" 35 | ], 36 | "engines": { 37 | "node": ">=10" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /scripts/test: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | if [ -x ".env" ]; then 5 | set -a 6 | . 
./.env 7 | set +a 8 | fi; 9 | 10 | if [ "$TEST_DATABASE_URL" == "" ]; then 11 | echo "ERROR: No test database configured; aborting" 12 | echo 13 | echo "To resolve this, ensure environmental variable TEST_DATABASE_URL is set" 14 | exit 1; 15 | fi; 16 | 17 | # Import latest schema (throw on error) 18 | psql -Xqv ON_ERROR_STOP=1 -f __tests__/plugin-test-schema.sql "$TEST_DATABASE_URL" 19 | echo "Database reset successfully ✅" 20 | 21 | # Now run the tests 22 | jest -i $@ 23 | -------------------------------------------------------------------------------- /src/DerivedFieldPlugin.js: -------------------------------------------------------------------------------- 1 | function DerivedFieldPlugin(builder, { derivedFieldDefinitions }) { 2 | builder.hook("GraphQLObjectType:fields", (fields, build, context) => { 3 | const { 4 | extend, 5 | getTypeByName, 6 | pgIntrospectionResultsByKind: introspectionResultsByKind, 7 | graphql: { GraphQLString, GraphQLInt, GraphQLFloat, GraphQLBoolean }, 8 | inflection, 9 | fieldDataGeneratorsByFieldNameByType, 10 | } = build; 11 | const { 12 | scope: { pgIntrospection: table, isPgRowType }, 13 | fieldWithHooks, 14 | Self, 15 | } = context; 16 | 17 | if ( 18 | !isPgRowType || 19 | !table || 20 | table.kind !== "class" || 21 | derivedFieldDefinitions == null 22 | ) { 23 | return fields; 24 | } 25 | 26 | const tableType = introspectionResultsByKind.type.filter( 27 | type => 28 | type.type === "c" && 29 | type.namespaceId === table.namespaceId && 30 | type.classId === table.id 31 | )[0]; 32 | if (!tableType) { 33 | throw new Error("Could not determine the type for this table"); 34 | } 35 | 36 | const derivedFields = derivedFieldDefinitions 37 | .map(def => { 38 | if (def.identifiers == null) { 39 | throw new Error( 40 | `Derived field definitions must include 'identifiers'` 41 | ); 42 | } 43 | return Object.assign({}, def, { 44 | identifiers: filterIdentifiers(table, def.identifiers), 45 | }); 46 | }) 47 | .reduce((memo, def) => { 48 | const addDerivedField = columns => { 49 | const attrs = introspectionResultsByKind.attribute 50 | .filter(attr => attr.classId === table.id) 51 | .filter(attr => columns.includes(attr.name)); 52 | const fieldNames = attrs.map(attr => inflection.column(attr)); 53 | const derivedFieldName = def.inflect(...fieldNames); 54 | if (memo[derivedFieldName]) { 55 | throw new Error( 56 | `Derived field '${derivedFieldName}' conflicts with existing GraphQL field` 57 | ); 58 | } 59 | memo[derivedFieldName] = fieldWithHooks( 60 | derivedFieldName, 61 | ({ addDataGenerator }) => { 62 | const generatorsByField = fieldDataGeneratorsByFieldNameByType.get( 63 | Self 64 | ); 65 | const setAlias = (fn, fieldName) => { 66 | return (...args) => { 67 | args[0].alias = fieldName; 68 | return fn(...args); 69 | }; 70 | }; 71 | for (let fieldName of fieldNames) { 72 | for (let g of generatorsByField[fieldName]) { 73 | addDataGenerator(setAlias(g, fieldName)); 74 | } 75 | } 76 | const scalarTypes = { 77 | String: GraphQLString, 78 | Int: GraphQLInt, 79 | Float: GraphQLFloat, 80 | Boolean: GraphQLBoolean, 81 | }; 82 | let type; 83 | if (typeof def.type === "string") { 84 | type = getTypeByName(def.type) || scalarTypes[def.type]; 85 | if (!type) { 86 | throw new Error( 87 | "Derived field definition requires 'type' string to be a valid GraphQL type name" 88 | ); 89 | } 90 | } else if (typeof def.type === "function") { 91 | type = def.type(build); 92 | if (!type) { 93 | throw new Error( 94 | "Derived field definition requires 'type' function to return a valid GraphQL 
type" 95 | ); 96 | } 97 | } else if (def.returnTypeName) { 98 | // DEPRECATED 99 | type = 100 | getTypeByName(def.returnTypeName) || 101 | scalarTypes[def.returnTypeName]; 102 | if (!type) { 103 | throw new Error( 104 | "Derived field definition requires 'returnTypeName' string to be a valid GraphQL type name" 105 | ); 106 | } 107 | } 108 | return { 109 | type: type || GraphQLString, 110 | description: def.description, 111 | resolve: (data, args, context, info) => { 112 | if ( 113 | fieldNames.filter( 114 | n => !Object.prototype.hasOwnProperty.call(data, n) 115 | ).length > 0 116 | ) { 117 | throw new Error( 118 | `Derived field '${derivedFieldName}' could not be resolved` 119 | ); 120 | } 121 | const fieldValues = fieldNames.map( 122 | fieldName => data[fieldName] 123 | ); 124 | return def.resolve(...fieldValues, context, info); 125 | }, 126 | }; 127 | }, 128 | {} 129 | ); 130 | }; 131 | for (const ident of def.identifiers) { 132 | if (ident.columns) { 133 | addDerivedField(ident.columns); 134 | } 135 | if (ident.tag) { 136 | const columns = introspectionResultsByKind.attribute 137 | .filter(attr => attr.classId === table.id) 138 | .filter(attr => Object.keys(attr.tags).includes(ident.tag)) 139 | .map(attr => attr.name); 140 | for (const column of columns) { 141 | addDerivedField([column]); 142 | } 143 | } 144 | } 145 | return memo; 146 | }, {}); 147 | 148 | return extend( 149 | fields, 150 | derivedFields, 151 | `Adding derived field to '${Self.name}'` 152 | ); 153 | }); 154 | 155 | function filterIdentifiers(table, identifiers) { 156 | return identifiers 157 | .filter( 158 | // Exclude identifiers that reference other tables 159 | ident => 160 | !ident.table || ident.table === `${table.namespaceName}.${table.name}` 161 | ) 162 | .map(ident => { 163 | // Map tag strings to { tag } objects and column strings to { table, columns } objects 164 | if (typeof ident !== "string") { 165 | return ident; 166 | } 167 | if (ident.startsWith("@")) { 168 | return { 169 | tag: ident.substr(1), 170 | }; 171 | } else { 172 | const t = ident.substring(0, ident.lastIndexOf(".")); 173 | const c = ident.substring(ident.lastIndexOf(".") + 1, ident.length); 174 | return { 175 | table: t, 176 | columns: [c], 177 | }; 178 | } 179 | }) 180 | .filter(ident => { 181 | if ((ident.columns && ident.tag) || (!ident.columns && !ident.tag)) { 182 | throw new Error( 183 | `One (and only one) of 'columns' or 'tags' must be specified in 'identifers'` 184 | ); 185 | } 186 | return true; 187 | }); 188 | } 189 | } 190 | 191 | module.exports = DerivedFieldPlugin; 192 | --------------------------------------------------------------------------------