├── .gitignore ├── karma.conf.js ├── scripts ├── mergeSchemas.js └── updateSchema.js ├── src ├── index.js ├── utils.js ├── execute │ └── network.js ├── split.js └── merge │ └── index.js ├── data ├── local │ ├── database.js │ └── schema.js └── server │ ├── database.js │ └── schema.js ├── package.json ├── NOTES.md ├── test └── index.js └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | /lib/ 3 | schema.json 4 | config.json 5 | schema.graphql 6 | -------------------------------------------------------------------------------- /karma.conf.js: -------------------------------------------------------------------------------- 1 | var webpack = require('webpack'); 2 | 3 | module.exports = function(config) { 4 | config.set({ 5 | frameworks: ['mocha'], 6 | 7 | files: [ 8 | 'test/**/*.js' 9 | ], 10 | 11 | preprocessors: { 12 | 'test/**/*.js': ['webpack'] 13 | }, 14 | 15 | webpack: { 16 | module: { 17 | loaders: [{ 18 | test: /\.js$/, 19 | exclude: /(node_modules)/, 20 | loader: 'babel-loader', 21 | query: { 22 | plugins: ['./build/babelRelayPlugin'] 23 | } 24 | }] 25 | } 26 | } 27 | 28 | 29 | }); 30 | }; 31 | -------------------------------------------------------------------------------- /scripts/mergeSchemas.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import path from 'path'; 3 | 4 | import localSchema from '../data/local/schema.json'; 5 | import serverSchema from '../data/server/schema.json'; 6 | 7 | import {createCompositeSchema} from '../src/merge'; 8 | 9 | const {schema,config} = createCompositeSchema({ 10 | server: serverSchema, 11 | local: localSchema 12 | }, { 13 | queryType: 'Query', 14 | mutationType: 'Mutation' 15 | }); 16 | 17 | fs.writeFileSync( 18 | path.join(__dirname, '../data/', 'schema.json'), 19 | JSON.stringify(schema, null, 2) 20 | ); 21 | 22 | fs.writeFileSync( 23 | path.join(__dirname, '../data/', 'config.json'), 24 | JSON.stringify(config, null, 2) 25 | ); 26 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import {createCompositeRequest,createMutationRequest} from './split'; 2 | import {executeCompositeRequests,executeCompositeMutation} from './execute/network'; 3 | 4 | export default class RelayCompositeNetworkLayer { 5 | 6 | constructor(config) { 7 | this.config = config; 8 | } 9 | 10 | sendQueries(queryRequests) { 11 | const context = {...this.config}; 12 | const compositeRequests = queryRequests.map(request => createCompositeRequest(request, context)); 13 | 14 | return executeCompositeRequests(compositeRequests, context); 15 | } 16 | 17 | sendMutation(mutationRequest) { 18 | const context = {...this.config}; 19 | const compositeMutationRequest = createMutationRequest(mutationRequest, context); 20 | 21 | return executeCompositeMutation(compositeMutationRequest, context); 22 | } 23 | 24 | supports(...options) { 25 | return false; 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /data/local/database.js: -------------------------------------------------------------------------------- 1 | export class User {} 2 | export class Draft {} 3 | 4 | // const VIEWER_ID = 'me'; 5 | const VIEWER_ID = 1; 6 | 7 | var viewer = new User(); 8 | viewer.id = VIEWER_ID; 9 | var usersById = { 10 | [VIEWER_ID]: viewer 11 | }; 12 | 13 | export function getUser(id) { 
14 | return usersById[id]; 15 | } 16 | 17 | 18 | var nextDraftId = 0; 19 | 20 | var draftsById = {}; 21 | var draftIdsByUser = { 22 | [VIEWER_ID]: [] 23 | }; 24 | 25 | addDraft('This is a draft', false); 26 | addDraft('This is another draft', false); 27 | 28 | export function addDraft(text, complete) { 29 | var draft = new Draft(); 30 | draft.complete = !!complete; 31 | draft.id = `${nextDraftId++}`; 32 | draft.text = text; 33 | draft.authorId = VIEWER_ID; 34 | draftsById[draft.id] = draft; 35 | draftIdsByUser[VIEWER_ID].push(draft.id); 36 | return draft.id; 37 | } 38 | 39 | export function getDraft(id) { 40 | return draftsById[id]; 41 | } 42 | 43 | export function getDrafts() { 44 | return draftIdsByUser[VIEWER_ID].map(id => draftsById[id]); 45 | } 46 | -------------------------------------------------------------------------------- /data/server/database.js: -------------------------------------------------------------------------------- 1 | export class User {} 2 | 3 | let id = 1; 4 | 5 | const usersById = {}; 6 | 7 | const addUser = (name) => { 8 | const user = new User(); 9 | user.id = id++; 10 | user.name = name; 11 | user.age = 13; 12 | user.gender = 'male'; 13 | 14 | usersById[user.id] = user; 15 | 16 | return user; 17 | } 18 | 19 | export const getViewer = () => { 20 | return viewer; 21 | } 22 | 23 | export const getUser = id => { 24 | return usersById[id]; 25 | } 26 | 27 | const viewer = addUser('Huey'); 28 | 29 | const contactsForUser = {}; 30 | const addContact = (user, contact) => { 31 | contactsForUser[user.id] = (contactsForUser[user.id] || []).concat(contact.id); 32 | } 33 | 34 | const jason = addUser('Jason'); 35 | const nate = addUser('Nate') 36 | 37 | addContact(viewer, jason); 38 | addContact(viewer, nate); 39 | addContact(viewer, addUser('Strickland')); 40 | 41 | export class Message {} 42 | 43 | const messagesById = {}; 44 | 45 | export const sendMessage = (author, recipient, text) => { 46 | const message = new Message(); 47 | message.id = id++; 48 | message.text = text; 49 | message.authorId = author.id; 50 | message.receipientId = recipient.id; 51 | 52 | messagesById[message.id] = message; 53 | 54 | return message; 55 | } 56 | 57 | sendMessage(viewer, nate, 'Howdy, here is a message'); 58 | sendMessage(viewer, jason, 'And another message'); 59 | 60 | export const getMessage = id => { 61 | return messagesById[id]; 62 | } 63 | -------------------------------------------------------------------------------- /data/server/schema.js: -------------------------------------------------------------------------------- 1 | import { 2 | GraphQLBoolean, 3 | GraphQLID, 4 | GraphQLInt, 5 | GraphQLList, 6 | GraphQLNonNull, 7 | GraphQLObjectType, 8 | GraphQLSchema, 9 | GraphQLString, 10 | GraphQLInterfaceType, 11 | } from 'graphql'; 12 | 13 | import { 14 | connectionArgs, 15 | connectionDefinitions, 16 | connectionFromArray, 17 | cursorForObjectInConnection, 18 | fromGlobalId, 19 | globalIdField, 20 | mutationWithClientMutationId, 21 | nodeDefinitions, 22 | toGlobalId, 23 | } from 'graphql-relay'; 24 | 25 | import { 26 | User, 27 | getViewer, 28 | getUser, 29 | } from './database'; 30 | 31 | const {nodeInterface, nodeField} = nodeDefinitions( 32 | (globalId) => { 33 | const {type, id} = fromGlobalId(globalId); 34 | if (type === 'User') { 35 | return getUser(id); 36 | } 37 | return null; 38 | }, 39 | (obj) => { 40 | if (obj instanceof User) { 41 | return UserType; 42 | } 43 | return null; 44 | } 45 | ); 46 | 47 | const UserType = new GraphQLObjectType({ 48 | name: 'User', 49 | fields: 
() => ({ 50 | id: globalIdField('User'), 51 | name: { type: GraphQLString }, 52 | age: { type: GraphQLInt }, 53 | gender: { type: GraphQLString } 54 | }), 55 | interfaces: [nodeInterface] 56 | }); 57 | 58 | var Root = new GraphQLObjectType({ 59 | name: 'Root', 60 | fields: { 61 | viewer: { 62 | type: UserType, 63 | resolve: () => getViewer() 64 | }, 65 | node: nodeField 66 | } 67 | }); 68 | 69 | export var schema = new GraphQLSchema({ 70 | query: Root 71 | }); 72 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "relay-composite-network-layer", 3 | "description": "A network layer for relay which can work with multiple underlying network layers", 4 | "version": "0.0.4", 5 | "license": "BSD-3-Clause", 6 | "homepage": "https://github.com/eyston/relay-composite-network-layer", 7 | "repository": { 8 | "type": "git", 9 | "url": "git+https://github.com/eyston/relay-composite-network-layer.git" 10 | }, 11 | "main": "lib/index.js", 12 | "files": [ 13 | "lib", 14 | "README.md" 15 | ], 16 | "scripts": { 17 | "update-schema": "babel-node ./scripts/updateSchema.js", 18 | "merge-schemas": "babel-node ./scripts/mergeSchemas.js", 19 | "build": "babel src --out-dir lib", 20 | "karma": "karma start", 21 | "prepublish": "npm run build" 22 | }, 23 | "dependencies": { 24 | "lodash": "3.10.1" 25 | }, 26 | "peerDependencies": { 27 | "react-relay": ">=0.6.0" 28 | }, 29 | "babel": { 30 | "presets": [ 31 | "es2015", 32 | "stage-0" 33 | ] 34 | }, 35 | "devDependencies": { 36 | "babel-cli": "6.3.17", 37 | "babel-core": "6.3.26", 38 | "babel-loader": "6.2.0", 39 | "babel-polyfill": "6.3.14", 40 | "babel-preset-es2015": "6.3.13", 41 | "babel-preset-stage-0": "6.3.13", 42 | "babel-relay-plugin": "0.6.0", 43 | "expect": "1.13.4", 44 | "graphql": "0.4.14", 45 | "graphql-relay": "0.3.6", 46 | "json-loader": "0.5.4", 47 | "karma": "0.13.16", 48 | "karma-mocha": "0.2.1", 49 | "karma-webpack": "1.7.0", 50 | "mocha": "2.3.4", 51 | "react": "0.14.3", 52 | "react-dom": "0.14.3", 53 | "react-relay": "0.6.0", 54 | "relay-local-schema": "0.3.0", 55 | "webpack": "1.12.9" 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /scripts/updateSchema.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env babel-node --optional es7.asyncFunctions 2 | /** 3 | * This file provided by Facebook is for non-commercial testing and evaluation 4 | * purposes only. Facebook reserves all rights not expressly granted. 5 | * 6 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 7 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 8 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 9 | * FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN 10 | * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 11 | * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
12 | */ 13 | 14 | import fs from 'fs'; 15 | import path from 'path'; 16 | import { schema as localSchema } from '../data/local/schema'; 17 | import { schema as serverSchema } from '../data/server/schema'; 18 | import { graphql } from 'graphql'; 19 | import { introspectionQuery, printSchema } from 'graphql/utilities'; 20 | 21 | const generateJSON = (schema, output) => { 22 | // Save JSON of full schema introspection for Babel Relay Plugin to use 23 | (async () => { 24 | var result = await (graphql(schema, introspectionQuery)); 25 | if (result.errors) { 26 | console.error( 27 | 'ERROR introspecting schema: ', 28 | JSON.stringify(result.errors, null, 2) 29 | ); 30 | } else { 31 | fs.writeFileSync( 32 | path.join(__dirname, output, 'schema.json'), 33 | JSON.stringify(result, null, 2) 34 | ); 35 | } 36 | })(); 37 | 38 | // Save user readable type system shorthand of schema 39 | fs.writeFileSync( 40 | path.join(__dirname, output, 'schema.graphql'), 41 | printSchema(schema) 42 | ); 43 | } 44 | 45 | generateJSON(localSchema, '../data/local'); 46 | generateJSON(serverSchema, '../data/server'); 47 | -------------------------------------------------------------------------------- /NOTES.md: -------------------------------------------------------------------------------- 1 | Classes 2 | ======= 3 | 4 | Not comprehensive by any means -- both classes and members. Only noting things which may be useful. 5 | 6 | RelayQueryRequest 7 | ----------------- 8 | 9 | extends 10 | 11 | `Deferred` 12 | 13 | members 14 | 15 | - getDebugName() : string 16 | - getID() : string 17 | - getType() : string 18 | - getVariables() : Variables 19 | - getQueryString() : string 20 | - getQuery() : RelayQuery.Root 21 | 22 | RelayMutationRequest 23 | -------------------- 24 | 25 | extends 26 | 27 | `Deferred` 28 | 29 | members 30 | 31 | - getDebugName() : string 32 | - getFiles() : FileMap 33 | - getVariables() : Variables 34 | - getQueryString() : string 35 | - getQuery() : RelayQuery.Root 36 | 37 | No id? 38 | 39 | 40 | RelayQueryNode 41 | -------------- 42 | 43 | statics 44 | 45 | - create(...) 46 | 47 | members 48 | 49 | - getChildren() : Array :: this grabs the concrete node's children and wraps them in `RelayQueryNode` 50 | - getField(field: RelayQueryField) : ?RelayQueryField :: this is a map of storage key -> child if child is a `RelayQueryField` 51 | - getType() : string 52 | - getVariables() : Variables 53 | - getConcreteQueryNode(onCacheMiss: () => any) : any 54 | 55 | RelayQueryRoot 56 | -------------- 57 | 58 | extends 59 | 60 | `RelayQueryNode` 61 | 62 | statics 63 | 64 | - build(...) 65 | - create(...) 66 | 67 | members 68 | 69 | - getName() : string 70 | - getID() : string 71 | - getCallsWithValues() : Array 72 | - getFieldName() : string 73 | - getIdentifyingArg() ?Call 74 | - getStorageKey() : string 75 | 76 | RelayQueryFragment 77 | ------------------ 78 | 79 | extends 80 | 81 | `RelayQueryNode` 82 | 83 | statics 84 | 85 | - build(...) 86 | - create(...) 87 | 88 | members 89 | 90 | - getDebugName() : string 91 | - getConcreteFragmentID() : string 92 | - getFragmentID() : string 93 | 94 | RelayQueryField 95 | --------------- 96 | 97 | extends 98 | 99 | `RelayQueryNode` 100 | 101 | statics 102 | 103 | - build(...) 104 | - create(...) 
105 | 106 | members 107 | 108 | - getDebugName() : string 109 | - getSchemaName() : string 110 | - getSerializationKey() : string 111 | - getStorageKey() : string 112 | - getApplicationName() : string 113 | - getCallsWithValues() : Array 114 | - getCallType(callName: string) : ?string 115 | 116 | 117 | RelayQueryOperation 118 | ------------------- 119 | 120 | extends 121 | 122 | `RelayQueryOperation` 123 | 124 | members 125 | 126 | - getName() : string 127 | - getResponseType() : string 128 | - getType() : string 129 | - getInputType() : string 130 | - getCall() : Call 131 | - getCallVariableName() : string 132 | - isAbstract() : boolean 133 | 134 | RelayQueryMutation 135 | ------------------ 136 | 137 | extends 138 | 139 | `RelayQueryOperation` 140 | 141 | statics 142 | 143 | - build(...) 144 | 145 | members 146 | 147 | - equals(RelayQueryNode) : boolean 148 | -------------------------------------------------------------------------------- /data/local/schema.js: -------------------------------------------------------------------------------- 1 | import { 2 | GraphQLBoolean, 3 | GraphQLID, 4 | GraphQLInt, 5 | GraphQLList, 6 | GraphQLNonNull, 7 | GraphQLObjectType, 8 | GraphQLSchema, 9 | GraphQLString, 10 | GraphQLInterfaceType, 11 | } from 'graphql'; 12 | 13 | import { 14 | connectionArgs, 15 | connectionDefinitions, 16 | connectionFromArray, 17 | cursorForObjectInConnection, 18 | fromGlobalId, 19 | globalIdField, 20 | mutationWithClientMutationId, 21 | nodeDefinitions, 22 | toGlobalId, 23 | } from 'graphql-relay'; 24 | 25 | import { 26 | User, 27 | getUser, 28 | Draft, 29 | getDraft, 30 | getDrafts, 31 | addDraft 32 | } from './database'; 33 | 34 | const {nodeInterface, nodeField} = nodeDefinitions( 35 | (globalId) => { 36 | var {type, id} = fromGlobalId(globalId); 37 | if (type === 'User') { 38 | return getUser(id); 39 | } else if (type === 'Draft') { 40 | return getDraft(id); 41 | } 42 | return null; 43 | }, 44 | (obj) => { 45 | if (obj instanceof User) { 46 | return UserType; 47 | } else if (obj instanceof Draft) { 48 | return DraftType; 49 | } 50 | return null; 51 | } 52 | ); 53 | 54 | const DraftType = new GraphQLObjectType({ 55 | name: 'Draft', 56 | fields: () => ({ 57 | id: globalIdField('Draft'), 58 | text: { type: GraphQLString }, 59 | author: { 60 | type: UserType, 61 | resolve: ({authorId}) => getUser(authorId) 62 | } 63 | }), 64 | interfaces: [nodeInterface] 65 | }); 66 | 67 | const { 68 | connectionType: DraftsConnection, 69 | edgeType: DraftEdge, 70 | } = connectionDefinitions({ 71 | name: 'Draft', 72 | nodeType: DraftType, 73 | }); 74 | 75 | 76 | const UserType = new GraphQLObjectType({ 77 | name: 'User', 78 | fields: () => ({ 79 | id: globalIdField('User'), 80 | drafts: { 81 | type: DraftsConnection, 82 | args: connectionArgs, 83 | resolve: (obj, args) => connectionFromArray(getDrafts(), args) 84 | }, 85 | draftCount: { 86 | type: GraphQLInt, 87 | resolve: () => getDrafts().length 88 | } 89 | }), 90 | interfaces: [nodeInterface] 91 | }); 92 | 93 | const Root = new GraphQLObjectType({ 94 | name: 'Root', 95 | fields: { 96 | node: nodeField 97 | } 98 | }); 99 | 100 | const AddDraftMutation = mutationWithClientMutationId({ 101 | name: 'AddDraft', 102 | inputFields: { 103 | text: { type: new GraphQLNonNull(GraphQLString) } 104 | }, 105 | outputFields: { 106 | edge: { 107 | type: DraftEdge, 108 | resolve: draft => { 109 | return { 110 | cursor: cursorForObjectInConnection(getDrafts(), draft), 111 | node: draft 112 | }; 113 | } 114 | }, 115 | author: { 116 | type: UserType, 117 | 
resolve: ({authorId}) => getUser(authorId) 118 | } 119 | }, 120 | mutateAndGetPayload: ({text}) => { 121 | const draftId = addDraft(text); 122 | return getDraft(draftId); 123 | } 124 | }); 125 | 126 | const Mutation = new GraphQLObjectType({ 127 | name: 'Mutation', 128 | fields: { 129 | addDraft: AddDraftMutation 130 | } 131 | }); 132 | 133 | export var schema = new GraphQLSchema({ 134 | query: Root, 135 | mutation: Mutation 136 | }); 137 | -------------------------------------------------------------------------------- /src/utils.js: -------------------------------------------------------------------------------- 1 | 2 | export const pipeline = (obj, ...fns) => { 3 | return fns.reduce((obj, fn) => fn(obj), obj); 4 | } 5 | 6 | export const curry = (fn, ...args) => { 7 | return (arg) => fn(...[arg, ...args]); 8 | } 9 | 10 | export const push = (arr, item) => { 11 | return arr.concat(item) 12 | } 13 | 14 | export const flatten = arrs => { 15 | return arrs.reduce((a, b) => [...a, ...b], []); 16 | } 17 | 18 | export const values = obj => { 19 | return Object.keys(obj).map(key => obj[key]); 20 | } 21 | 22 | export const union = (...arrs) => { 23 | return arrs.reduce((a, b) => { 24 | return [...new Set([...a, ...b])]; 25 | }); 26 | } 27 | 28 | export const intersect = (...arrs) => { 29 | return arrs.reduce((a, b) => { 30 | const bs = new Set(b); 31 | return a.filter(n => bs.has(n)); 32 | }); 33 | } 34 | 35 | export const difference = (...arrs) => { 36 | return arrs.reduce((a, b) => { 37 | const bs = new Set(b); 38 | return a.filter(n => !bs.has(n)); 39 | }); 40 | } 41 | 42 | export const pick = (obj, ...keys) => { 43 | return into({}, Object.keys(obj) 44 | .filter(key => keys.includes(key) && obj[key]) 45 | .map(key => [key, obj[key]])); 46 | } 47 | 48 | export const pairs = obj => { 49 | return Object.keys(obj).map(key => [key, obj[key]]); 50 | } 51 | 52 | export const into = (obj, kvps) => { 53 | return kvps.reduce((obj, [key, value]) => ({ 54 | ...obj, 55 | [key]: value 56 | }), obj); 57 | } 58 | 59 | export const get = (obj, field, defaultValue) => { 60 | if (obj) { 61 | return obj[field] || defaultValue; 62 | } else { 63 | return defaultValue; 64 | } 65 | } 66 | 67 | export const getIn = (obj, path, defaultValue) => { 68 | if (path.length === 1) { 69 | return get(obj, path[0], defaultValue) 70 | } else if (obj) { 71 | const key = path[0]; 72 | return getIn(obj[key], path.slice(1), defaultValue); 73 | } else { 74 | return defaultValue; 75 | } 76 | } 77 | 78 | export const set = (obj, key, value) => { 79 | return { 80 | ...obj, 81 | [key]: value 82 | }; 83 | } 84 | 85 | export const setIn = (obj, path, value) => { 86 | if (path.length === 1) { 87 | return { 88 | ...obj, 89 | [path[0]]: value 90 | }; 91 | } else if (obj) { 92 | const key = path[0]; 93 | return { 94 | ...obj, 95 | [key]: setIn(obj[key] || {}, path.slice(1), value) 96 | }; 97 | } else { 98 | return setIn({}, path, value); 99 | } 100 | } 101 | 102 | export const update = (obj, field, defaultValue, updater) => { 103 | if (!updater) { 104 | updater = defaultValue; 105 | defaultValue = undefined; 106 | } 107 | 108 | return { 109 | ...obj, 110 | [field]: updater(obj[field] || defaultValue) 111 | }; 112 | } 113 | 114 | export const updateIn = (obj, path, defaultValue, updater) => { 115 | if (path.length === 1) { 116 | return update(obj, path[0], defaultValue, updater); 117 | } else { 118 | const key = path[0]; 119 | if (Array.isArray(obj)) { 120 | const copy = obj.slice(); 121 | copy[key] = updateIn(copy[key] || {}, path.slice(1), 
defaultValue, updater); 122 | return copy; 123 | } else { 124 | return { 125 | ...obj, 126 | [key]: updateIn(obj[key] || {}, path.slice(1), defaultValue, updater) 127 | }; 128 | } 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /src/execute/network.js: -------------------------------------------------------------------------------- 1 | import Relay from 'react-relay'; 2 | import merge from 'lodash/object/merge'; 3 | 4 | import RelayQuery from 'react-relay/lib/RelayQuery'; 5 | import RelayQueryRequest from 'react-relay/lib/RelayQueryRequest'; 6 | import RelayMutationRequest from 'react-relay/lib/RelayMutationRequest'; 7 | 8 | import {flatten,setIn,updateIn} from '../utils'; 9 | 10 | export const executeCompositeRequests = async (compositeRequests, context) => { 11 | 12 | compositeRequests.forEach(async compositeRequest => { 13 | try { 14 | let responses = await Promise.all(compositeRequest.queries.map(query => { 15 | return executeQuery(query, context); 16 | })); 17 | 18 | compositeRequest.request.resolve(merge({}, ...responses)); 19 | } catch (err) { 20 | compositeRequest.request.reject(err); 21 | } 22 | }); 23 | 24 | } 25 | 26 | export const executeCompositeMutation = async ({mutation,request}, context) => { 27 | try { 28 | const response = await executeMutation(mutation, context); 29 | request.resolve(response); 30 | } catch (err) { 31 | request.reject(err); 32 | } 33 | } 34 | 35 | const executeQuery = async (query, context) => { 36 | const request = new RelayQueryRequest(query.query); 37 | const networkLayer = context.layers[query.schema]; 38 | 39 | networkLayer.sendQueries([request]); 40 | 41 | return request.then(data => executeDependents(query, data, context)); 42 | } 43 | 44 | const executeMutation = async (mutation, context) => { 45 | const request = new RelayMutationRequest(mutation.mutation); 46 | 47 | const networkLayer = context.layers[mutation.schema]; 48 | 49 | networkLayer.sendMutation(request); 50 | 51 | return request.then(data => executeDependents(mutation, data, context)); 52 | } 53 | 54 | const executeDependents = async (query, data, context) => { 55 | const datasWithPath = await Promise.all(query.dependents.map(async ({path,fragment}) => { 56 | const pathIds = getIdsWithPath(data.response, path); 57 | 58 | return Promise.all(pathIds.map(async ({id, path}) => { 59 | const query = createCompositeQuery(fragment, id); 60 | const data = await executeQuery(query, context); 61 | return { data, path }; 62 | })); 63 | })); 64 | 65 | return flatten(datasWithPath).reduce((data, {path, data: depData}) => { 66 | return updateIn(data, ['response', ...path], node => merge({}, node, depData.response.node)); 67 | }, data); 68 | } 69 | 70 | const createCompositeQuery = ({children, schema, type, dependents}, id) => { 71 | const query = Relay.createQuery({ 72 | calls: [{ 73 | kind: 'Call', 74 | metadata: {}, 75 | name: 'id', 76 | value: { 77 | kind: 'CallVariable', 78 | callVariableName: 'id' 79 | } 80 | }], 81 | fieldName: 'node', 82 | kind: 'Query', 83 | metadata: { 84 | isAbstract: true, 85 | identifyingArgName: 'id' 86 | }, 87 | name: 'App', 88 | type: 'Node', 89 | children: [{ 90 | fieldName: 'id', 91 | kind: 'Field', 92 | metadata: { 93 | isGenerated: true, 94 | isRequisite: true 95 | }, 96 | type: 'ID' 97 | }, { 98 | fieldName: '__typename', 99 | kind: 'Field', 100 | metadata: { 101 | isGenerated: true, 102 | isRequisite: true 103 | }, 104 | type: 'String' 105 | }, { 106 | kind: 'Fragment', 107 | metadata: {}, 108 | name: type, 
109 | type: type, 110 | children: [] 111 | }] 112 | }, { id }); 113 | 114 | return { 115 | query: query.clone(query.getChildren().map(child => { 116 | if (child instanceof RelayQuery.Fragment) { 117 | return child.clone(children); 118 | } else { 119 | return child; 120 | } 121 | })), 122 | schema, 123 | dependents 124 | }; 125 | } 126 | 127 | const getIdsWithPath = (data, path, backwardPath = []) => { 128 | if (path.length === 0) { 129 | const id = data.id; 130 | return id ? [{id, path: backwardPath}] : []; 131 | } else { 132 | const segment = path[0]; 133 | const sub = data[path[0]]; 134 | const remaining = path.slice(1); 135 | if (sub) { 136 | if (Array.isArray(sub)) { 137 | return sub.reduce((ids, item, index) => { 138 | return ids.concat(getIdsWithPath(item, remaining, backwardPath.concat([segment, index]))); 139 | }, []); 140 | } else { 141 | return getIdsWithPath(sub, remaining, backwardPath.concat(segment)); 142 | } 143 | } else { 144 | return []; 145 | } 146 | } 147 | } 148 | -------------------------------------------------------------------------------- /src/split.js: -------------------------------------------------------------------------------- 1 | import RelayQuery from 'react-relay/lib/RelayQuery'; 2 | 3 | import {getIn,update} from './utils'; 4 | 5 | const ANY_SCHEMA = '__ANY__'; 6 | 7 | // CompositeRequest = { 8 | // queries: [CompositeQuery], 9 | // request 10 | // } 11 | 12 | export const createCompositeRequest = (request, context) => { 13 | const query = request.getQuery(); 14 | const queries = splitBySchema(query, context); 15 | 16 | return { 17 | queries, 18 | request 19 | }; 20 | } 21 | 22 | export const createMutationRequest = (request, context) => { 23 | const mutation = request.getMutation(); 24 | 25 | return { 26 | mutation: splitBySchema(mutation, context), 27 | request 28 | }; 29 | } 30 | 31 | const splitBySchema = (query, context) => { 32 | if (query instanceof RelayQuery.Root) { 33 | return createCompositeQuery(query, context); 34 | } else if (query instanceof RelayQuery.Field) { 35 | return createCompositeFieldField(query, context); 36 | } else if (query instanceof RelayQuery.Fragment) { 37 | return createCompositeFragmentField(query, context); 38 | } else if (query instanceof RelayQuery.Mutation) { 39 | return createCompositeMutation(query, context); 40 | } else { 41 | // how do I print out wtf the type is lulz 42 | throw new Error('unhandled RelayQuery type'); 43 | } 44 | } 45 | 46 | // CompositeQuery = { 47 | // query: RelayQuery 48 | // schema, 49 | // dependents 50 | // } 51 | 52 | const createCompositeQuery = (root, context) => { 53 | 54 | const {extensions,queryType} = context; 55 | const field = root.getFieldName(); 56 | const schema = extensions[queryType][field] || ANY_SCHEMA; 57 | 58 | const fragments = createFragments(root.getChildren(), { 59 | ...context, 60 | parent: root.getType(), 61 | schema 62 | }); 63 | 64 | // query { node } 65 | if (schema === ANY_SCHEMA) { 66 | const {children,dependents} = collectFragments([field], schema, fragments); 67 | const oldFragment = children.find(c => c instanceof RelayQuery.Fragment); 68 | 69 | // pop dependencies of ANY_SCHEMA up into root queries 70 | return dependents.map(dep => { 71 | // inline ANY_SCHEMA fragments into each schema fragment 72 | const newFragment = oldFragment.clone([...oldFragment.getChildren(), ...dep.fragment.children]); 73 | return { 74 | query: root.clone(children.map(child => child === oldFragment ? 
newFragment : child)), 75 | schema: dep.fragment.schema, 76 | dependents: dep.fragment.dependents 77 | }; 78 | }); 79 | } else { 80 | const {children,dependents} = collectFragments([field], schema, fragments); 81 | 82 | return [{ 83 | query: root.clone(children), 84 | schema, 85 | dependents 86 | }]; 87 | } 88 | } 89 | 90 | // CompositeMutation = { 91 | // mutation: RelayMutation 92 | // schema, 93 | // dependents 94 | // } 95 | 96 | const createCompositeMutation = (mutation, context) => { 97 | 98 | const {extensions,mutationType} = context; 99 | const call = mutation.getCall(); 100 | const field = call.name; 101 | const schema = extensions[mutationType][field]; 102 | 103 | // TODO: invariant schema !== null 104 | 105 | const fragments = createFragments(mutation.getChildren(), { 106 | ...context, 107 | parent: mutation.getType(), 108 | schema 109 | }); 110 | 111 | const {children,dependents} = collectFragments([field], schema, fragments); 112 | 113 | return { 114 | mutation: mutation.clone(children), 115 | schema, 116 | dependents 117 | }; 118 | 119 | } 120 | 121 | // CompositeFragment = { 122 | // children, 123 | // type, 124 | // schema, 125 | // dependents 126 | // } 127 | 128 | const createFragments = (children, context) => { 129 | const {parent,schema} = context; 130 | 131 | return children 132 | .map(child => splitBySchema(child, context)) 133 | .reduce((fragments, field) => { 134 | const {schema} = field; 135 | return update(fragments, schema, emptyFragment(parent, schema), fragment => addField(fragment, field)) 136 | }, {}); 137 | } 138 | 139 | const collectFragments = (path, schema, fragments) => { 140 | const children = getIn(fragments, [schema, 'children'], []); 141 | const dependents = Object.keys(fragments) 142 | .map(schema => fragments[schema]) 143 | .reduce((deps, fragment) => { 144 | if (fragment.schema === schema) { 145 | return [...deps, ...fragment.dependents.map(d => increaseDepth(d, path))]; 146 | } else { 147 | return [...deps, createDependentQuery(fragment, path)]; 148 | } 149 | }, []); 150 | 151 | return {children,dependents,schema}; 152 | } 153 | 154 | const emptyFragment = (type, schema) => ({ 155 | type, 156 | schema, 157 | children: [], 158 | dependents: [] 159 | }) 160 | 161 | const addField = (fragment, field) => { 162 | return { 163 | ...fragment, 164 | children: [...fragment.children, field.node], 165 | dependents: [...fragment.dependents, ...field.dependents] 166 | }; 167 | } 168 | 169 | // CompositeField = { 170 | // node, 171 | // schema, 172 | // dependents 173 | // } 174 | 175 | const createCompositeFieldField = (field, context) => { 176 | const {schema,parent,extensions} = context; 177 | 178 | const fieldSchema = getIn(extensions, [parent, field.getSchemaName()], schema); 179 | 180 | const fragments = createFragments(field.getChildren(), { 181 | ...context, 182 | parent: field.getType(), 183 | schema: fieldSchema 184 | }); 185 | 186 | const key = field.getSerializationKey(); 187 | const {children,dependents} = collectFragments([key], fieldSchema, fragments); 188 | 189 | return { 190 | node: field.clone(children), 191 | schema: fieldSchema, 192 | dependents 193 | }; 194 | 195 | } 196 | 197 | const createCompositeFragmentField = (fragment, context) => { 198 | const {schema} = context; 199 | 200 | const fragments = createFragments(fragment.getChildren(), { 201 | ...context, 202 | parent: fragment.getType() 203 | }); 204 | 205 | const {children,dependents} = collectFragments([], schema, fragments); 206 | 207 | return { 208 | node: 
fragment.clone(children), 209 | schema, 210 | dependents 211 | }; 212 | } 213 | 214 | // CompositeDependentQuery = { 215 | // fragment, 216 | // path 217 | // } 218 | 219 | const createDependentQuery = (fragment, path) => { 220 | return { 221 | path, 222 | // sorta unsure if this covers it all ... 223 | fragment: update(fragment, 'dependents', deps => deps.map(d => increaseDepth(d, ['node']))) 224 | }; 225 | } 226 | 227 | const increaseDepth = (dep, path) => { 228 | return { 229 | ...dep, 230 | path: [...path, ...dep.path] 231 | }; 232 | } 233 | -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | import 'babel-polyfill'; 2 | 3 | import expect from 'expect'; 4 | 5 | import Relay from 'react-relay'; 6 | import RelayLocalSchema from 'relay-local-schema'; 7 | 8 | import {schema as localSchema} from '../data/local/schema'; 9 | import {schema as serverSchema} from '../data/server/schema'; 10 | 11 | import RelayCompositeNetworkLayer from '../src'; 12 | 13 | import config from 'json!../data/config.json'; 14 | 15 | const DEBUG = false; 16 | 17 | class RelayLoggingNetworkLayer { 18 | constructor(name, layer) { 19 | this.name = name; 20 | this.layer = layer; 21 | } 22 | 23 | sendQueries(queryRequests) { 24 | queryRequests.forEach(request => { 25 | if (DEBUG) { console.log(this.name, request.getID(), 'request', request.getQueryString()); } 26 | request.then(response => { 27 | if (DEBUG) { console.log(this.name, request.getID(), 'response', JSON.stringify(response, null, 2)); } 28 | }); 29 | }); 30 | 31 | return this.layer.sendQueries(queryRequests); 32 | } 33 | 34 | sendMutation(mutationRequest) { 35 | if (DEBUG) { 36 | console.log(this.name, 'request', mutationRequest.getQueryString()); 37 | mutationRequest.then(response => { 38 | if (DEBUG) { 39 | console.log(this.name, 'response', JSON.stringify(response, null, 2)); 40 | } 41 | }); 42 | } 43 | return this.layer.sendMutation(mutationRequest); 44 | } 45 | 46 | supports(...options) { 47 | return this.layer.supports(...options); 48 | } 49 | } 50 | 51 | const withLogging = (name, layer) => { 52 | return new RelayLoggingNetworkLayer(name, layer); 53 | } 54 | 55 | // SUT 56 | Relay.injectNetworkLayer(withLogging('composite', new RelayCompositeNetworkLayer({ 57 | ...config, 58 | layers: { 59 | server: withLogging('server', new RelayLocalSchema.NetworkLayer({schema: serverSchema})), 60 | local: withLogging('local', new RelayLocalSchema.NetworkLayer({schema: localSchema})) 61 | } 62 | }))); 63 | 64 | describe('RelayCompositeNetworkLayer', () => { 65 | 66 | it('it can query a single schemas', async () => { 67 | 68 | const node = Relay.QL` 69 | query { 70 | viewer { 71 | name 72 | } 73 | } 74 | `; 75 | 76 | const query = Relay.createQuery(node, {}); 77 | 78 | const response = await getQuery(query); 79 | 80 | expect(removeDataIds(response)).toEqual({ 81 | name: 'Huey' 82 | }); 83 | 84 | }); 85 | 86 | it('can query multiple schemas', async () => { 87 | 88 | const node = Relay.QL` 89 | query { 90 | viewer { 91 | name 92 | drafts(first: $first) { 93 | edges { 94 | node { 95 | text 96 | } 97 | } 98 | } 99 | } 100 | } 101 | `; 102 | 103 | const query = Relay.createQuery(node, {first: 10}); 104 | 105 | const response = await getQuery(query); 106 | 107 | expect(removeDataIds(response)).toEqual({ 108 | name: 'Huey', 109 | drafts: { 110 | edges: [{ 111 | node: { 112 | text: 'This is a draft' 113 | } 114 | },{ 115 | node: { 116 | text: 'This 
is another draft' 117 | } 118 | }] 119 | } 120 | }); 121 | 122 | }); 123 | 124 | it('can traverse between multiple schemas', async () => { 125 | 126 | const node = Relay.QL` 127 | query { 128 | viewer { 129 | name 130 | drafts(first: $first) { 131 | edges { 132 | node { 133 | text 134 | author { 135 | name 136 | } 137 | } 138 | } 139 | } 140 | } 141 | } 142 | `; 143 | 144 | const query = Relay.createQuery(node, {first: 10}); 145 | 146 | const response = await getQuery(query); 147 | 148 | expect(removeDataIds(response)).toEqual({ 149 | name: 'Huey', 150 | drafts: { 151 | edges: [{ 152 | node: { 153 | text: 'This is a draft', 154 | author: { 155 | name: 'Huey' 156 | } 157 | } 158 | },{ 159 | node: { 160 | text: 'This is another draft', 161 | author: { 162 | name: 'Huey' 163 | } 164 | } 165 | }] 166 | } 167 | }); 168 | 169 | }); 170 | 171 | it('can query multiple schemas on a single node', async () => { 172 | 173 | const node = Relay.QL` 174 | query { 175 | node(id: $id) { 176 | ... on User { 177 | age 178 | gender 179 | draftCount 180 | } 181 | } 182 | } 183 | `; 184 | 185 | const query = Relay.createQuery(node, {id: 'VXNlcjox'}); 186 | 187 | const response = await getQuery(query); 188 | 189 | expect(removeDataIds(response)).toEqual({ 190 | age: 13, 191 | gender: 'male', 192 | draftCount: 2 193 | }); 194 | 195 | }); 196 | 197 | it('can work with a mutation payload in multiple scheams', async () => { 198 | 199 | class AddDraftMutation extends Relay.Mutation { 200 | 201 | static fragments = { 202 | author: () => Relay.QL` 203 | fragment on User { 204 | id 205 | } 206 | ` 207 | } 208 | 209 | getMutation() { 210 | return Relay.QL`mutation { addDraft }`; 211 | } 212 | 213 | getVariables() { 214 | return { 215 | text: this.props.text 216 | } 217 | } 218 | 219 | getFatQuery() { 220 | return Relay.QL` 221 | fragment on AddDraftPayload { 222 | author { 223 | drafts 224 | draftCount 225 | } 226 | edge 227 | } 228 | `; 229 | } 230 | 231 | getConfigs() { 232 | return [{ 233 | type: 'RANGE_ADD', 234 | parentName: 'author', 235 | parentID: this.props.author.id, 236 | connectionName: 'drafts', 237 | edgeName: 'edge', 238 | rangeBehaviors: { 239 | '': 'append' 240 | } 241 | }]; 242 | } 243 | 244 | } 245 | 246 | const node = Relay.QL` 247 | query { 248 | viewer { 249 | draftCount 250 | drafts(first: $first) { 251 | edges { 252 | node { 253 | text 254 | author { 255 | name 256 | } 257 | } 258 | } 259 | } 260 | } 261 | } 262 | `; 263 | 264 | const query = Relay.createQuery(node, {first: 10}); 265 | 266 | await getQuery(query); 267 | 268 | 269 | const mutation = new AddDraftMutation({ 270 | author: { id: 'VXNlcjox' }, 271 | text: 'Here is some text yo' 272 | }); 273 | 274 | await doMutation(mutation); 275 | 276 | const response = Relay.Store.readQuery(query)[0]; 277 | 278 | expect(removeDataIds(response)).toEqual({ 279 | draftCount: 3, 280 | drafts: { 281 | edges: [{ 282 | node: { 283 | text: 'This is a draft', 284 | author: { 285 | name: 'Huey' 286 | } 287 | } 288 | },{ 289 | node: { 290 | text: 'This is another draft', 291 | author: { 292 | name: 'Huey' 293 | } 294 | } 295 | },{ 296 | node: { 297 | text: 'Here is some text yo', 298 | author: { 299 | name: 'Huey' 300 | } 301 | } 302 | }] 303 | } 304 | }); 305 | 306 | }); 307 | 308 | 309 | }); 310 | 311 | const isObject = obj => { 312 | return typeof obj === 'object' && obj !== null; 313 | } 314 | 315 | const removeDataIds = obj => { 316 | if (isObject(obj)) { 317 | delete obj.__dataID__; 318 | Object.keys(obj).forEach(key => obj[key] = 
removeDataIds(obj[key])); 319 | return obj; 320 | } else if (Array.isArray(obj)) { 321 | obj.forEach(item => removeDataIds(item)); 322 | return obj; 323 | } else { 324 | return obj; 325 | } 326 | } 327 | 328 | const getQuery = query => { 329 | return new Promise((resolve, reject) => { 330 | Relay.Store.primeCache({ viewer: query }, state => { 331 | if (state.error) { 332 | reject(state.error); 333 | } else if (state.done) { 334 | resolve(Relay.Store.readQuery(query)[0]); 335 | } 336 | }); 337 | }); 338 | } 339 | 340 | const doMutation = mutation => { 341 | return new Promise((resolve, reject) => { 342 | Relay.Store.update(mutation, { 343 | onSuccess: () => resolve(), 344 | onFailure: transaction => reject(transaction.getError()) 345 | }); 346 | }); 347 | } 348 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | RelayCompositeNetworkLayer 2 | ========================== 3 | 4 | The `RelayCompositeNetworkLayer` is a [Relay Network Layer](https://facebook.github.io/relay/docs/guides-network-layer.html) which can be made of many different Network Layers each with their own schema. This is accomplished by merging multiple schemas into a single schema. Relay then generates appropriate queries using this schema while the `RelayCompositeNetworkLayer` splits and sends the queries by schema. 5 | 6 | The main use case for this is allowing a *local* and *server* schema. 7 | 8 | Installation 9 | ============ 10 | 11 | `npm install relay-composite-network-layer` 12 | 13 | It has a peer dependency on `react-relay` (the version is pretty strict at the moment but probably could be loosened!). 14 | 15 | Usage 16 | ===== 17 | 18 | The first step is merging the schema. You can add a build step or make it part of your `update-schema` script. 19 | 20 | ```js 21 | import {createCompositeSchema} from 'relay-composite-network-layer/lib/merge'; 22 | 23 | const {schema,config} = createCompositeSchema({ 24 | // name / value pairs of schemas 25 | server: serverSchema, 26 | local: localSchema 27 | }, { 28 | // names for the query and mutation type of the output schema 29 | // these can be the same names as your input schemas 30 | queryType: 'Query', 31 | mutationType: 'Mutation' 32 | }); 33 | ``` 34 | 35 | The outputs of `schema` and `config` need to be saved to `json` files for consumption. Configure the `babelRelayPlugin` to use the saved merged `schema`. With the schemas merged and `Relay` configured you should be able to write / parse queries which hit multiple schemas. 36 | 37 | A full script example is at the end of the README. 38 | 39 | The next step is to create the `RelayCompositeNetworkLayer` on the client. 40 | 41 | ```js 42 | import RelayCompositeNetworkLayer from 'relay-composite-network-layer'; 43 | 44 | const compositeNetworkLayer = new RelayCompositeNetworkLayer({ 45 | // config is the output of the `createCompositeSchema` function 46 | ...config, 47 | // key / value pairs of schema 48 | // names need to match the call to `createCompositeSchema` 49 | layers: { 50 | server: new Relay.DefaultNetworkLayer('/graphql'), 51 | local: new RelayLocalSchema.NetworkLayer({schema: localSchema}) 52 | } 53 | }); 54 | ``` 55 | 56 | Here we are creating a composite network layer which has two schemas. The `server` schema uses the default network layer and makes network requests to the `/graphql` endpoint. 
The `local` schema uses the [RelayLocalSchema](https://github.com/relay-tools/relay-local-schema) library to execute `graphql-js` on the client.
57 | 
58 | Finally, inject the network layer into `Relay`.
59 | 
60 | ```js
61 | Relay.injectNetworkLayer(compositeNetworkLayer);
62 | ```
63 | 
64 | That should be it!
65 | 
66 | Limitations
67 | ===========
68 | 
69 | The main limitations are around merging.
70 | 
71 | You can only merge `node` interface objects. This means that if you define *User* in multiple schemas, each schema's *User* needs an `id` field and must be fetchable from the `node` root query field. Other objects which appear in multiple schemas must be equivalent (same fields) or the merge will fail.
72 | 
73 | Enums, Scalars, and Input Objects must also be equivalent if they appear with the same name in multiple schemas.
74 | 
75 | Unions and Interfaces, with the exception of the `node` interface, are not allowed to appear in multiple schemas.
76 | 
77 | Fields on the Query, Mutation, and Subscription root types are also merged, and those field names must be unique. For example, most schemas have a `viewer` field, but it may only exist in one schema; other schemas navigate to the viewer by querying `node` with the viewer's id.
78 | 
79 | Some of these restrictions could be lifted -- I just haven't thought through the use cases!
80 | 
81 | Example
82 | =======
83 | 
84 | I'm going to use the example from the GitHub issue for Relay local data: https://github.com/facebook/relay/issues/114
85 | 
86 | We have the following query:
87 | 
88 | ```
89 | query {
90 |   viewer {
91 |     name, # server field
92 |     drafts(first: 10) { # client-only field
93 |       edges { node { title } }
94 |     }
95 |   }
96 | }
97 | ```
98 | 
99 | This means we have the following two schemas.
100 | 
101 | **Server**
102 | 
103 | ```
104 | type User : Node {
105 |   id: ID!
106 |   name: String
107 | }
108 | 
109 | type Query {
110 |   viewer: User
111 |   node: Node
112 | }
113 | ```
114 | 
115 | **Local**
116 | 
117 | ```
118 | type User : Node {
119 |   id: ID!
120 |   drafts: DraftConnection
121 | }
122 | 
123 | type Draft : Node {
124 |   id: ID!
125 |   title: String
126 | }
127 | 
128 | type DraftConnection {
129 |   edges: [DraftEdge]
130 | }
131 | 
132 | type DraftEdge {
133 |   node: Draft
134 | }
135 | 
136 | type Query {
137 |   node: Node
138 | }
139 | ```
140 | 
141 | The type `User` is defined in both schemas. Both schemas also have a query root type named *Query*, but the names do not have to match. Only the server schema has the `viewer` field.
142 | 
143 | When merged, we get a third schema.
144 | 
145 | **Composite**
146 | 
147 | ```
148 | type User : Node {
149 |   id: ID!
150 |   name: String
151 |   drafts: DraftConnection
152 | }
153 | 
154 | type Draft : Node {
155 |   id: ID!
156 |   title: String
157 | }
158 | 
159 | type DraftConnection {
160 |   edges: [DraftEdge]
161 | }
162 | 
163 | type DraftEdge {
164 |   node: Draft
165 | }
166 | 
167 | type Query {
168 |   viewer: User
169 |   node: Node
170 | }
171 | ```
172 | 
173 | This schema presents a unified view of the two schemas which Relay can use to generate queries.
174 | 
175 | Let's follow the execution of the query.
176 | 
177 | ```
178 | query {
179 |   viewer {
180 |     name, # server field
181 |     drafts(first: 10) { # client-only field
182 |       edges { node { title } }
183 |     }
184 |   }
185 | }
186 | ```
187 | 
188 | The composite network layer first splits the query by schema. This results in multiple queries, some of which depend on the results of others.
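The splitter knows which schema owns each field from the `config` produced by the merge step. For the two example schemas above, `config.json` would contain roughly the mapping sketched below -- the shape follows the `Config` comment in `src/merge/index.js`, but the concrete values here are an illustration rather than generated output:

```js
// Illustrative sketch of what config.json contains for the example schemas above;
// `extensions` maps type name -> field name -> the schema that owns the field.
const config = {
  queryType: 'Query',
  extensions: {
    Query: { viewer: 'server' },
    User: { name: 'server', drafts: 'local' },
    Draft: { title: 'local' }
  }
};
```

For the example query, the split produces the two queries below.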
189 | 
190 | 
191 | **q1** *server*
192 | 
193 | ```
194 | query {
195 |   viewer {
196 |     id
197 |     name
198 |   }
199 | }
200 | ```
201 | 
202 | This query is not dependent on anything.
203 | 
204 | **q2** *local*
205 | 
206 | ```
207 | query {
208 |   node(id: $id) {
209 |     id
210 |     __typename
211 |     ... on User {
212 |       drafts(first: 10) {
213 |         edges { node { title } }
214 |       }
215 |     }
216 |   }
217 | }
218 | ```
219 | 
220 | This query depends on the `viewer.id` result from **q1**, so it is run sequentially after **q1**.
221 | 
222 | The queries are then executed.
223 | 
224 | **q1**
225 | 
226 | ```
227 | {
228 |   data: {
229 |     viewer: {
230 |       id: 1,
231 |       name: 'Huey'
232 |     }
233 |   }
234 | }
235 | ```
236 | 
237 | **q2**
238 | 
239 | ```
240 | {
241 |   data: {
242 |     node: {
243 |       id: 1,
244 |       __typename: 'User',
245 |       drafts: {
246 |         edges: [
247 |           { node: { title: 'Taste Javascript' } },
248 |           { node: { title: 'Paint a self portrait' } }
249 |         ]
250 |       }
251 |     }
252 |   }
253 | }
254 | ```
255 | 
256 | Finally, the results are merged and passed back to Relay.
257 | 
258 | ```
259 | {
260 |   data: {
261 |     viewer: {
262 |       id: 1,
263 |       __typename: 'User',
264 |       name: 'Huey',
265 |       drafts: {
266 |         edges: [
267 |           { node: { title: 'Taste Javascript' } },
268 |           { node: { title: 'Paint a self portrait' } }
269 |         ]
270 |       }
271 |     }
272 |   }
273 | }
274 | ```
275 | 
276 | Relay does not care that part of the query was resolved locally and part of it remotely.
277 | 
278 | You could also add an `author` field to `Draft` and query back and forth between *local* and *server*:
279 | 
280 | ```
281 | query {
282 |   viewer { # server field
283 |     selectedDraft { # local field
284 |       author { # server field
285 |         selectedDraft {
286 |           author {
287 |             selectedDraft {
288 |               author {
289 |                 name
290 |               }
291 |             }
292 |           }
293 |         }
294 |       }
295 |     }
296 |   }
297 | }
298 | ```
299 | 
300 | This will generate a bunch of sequential queries, so be careful!
301 | 
302 | Mutations
303 | =========
304 | 
305 | Mutations are treated just like queries. As far as I know, Relay only allows a single field on a mutation, so no extra coordination work is needed at the network layer. The schema for the mutation field is looked up and the mutation is sent to that schema's network layer. The mutation payload is then treated just like a query and can come from multiple schemas.
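No special client code is needed for this -- the test suite (`test/index.js`) drives a cross-schema mutation with a plain `Relay.Mutation`. A trimmed version of it (the full test also declares an `author` fragment and a `RANGE_ADD` config):

```js
import Relay from 'react-relay';

// Trimmed from test/index.js: addDraft is defined in the local schema, while
// the fields under author in the fat query are served by the server schema.
class AddDraftMutation extends Relay.Mutation {
  getMutation() {
    return Relay.QL`mutation { addDraft }`;
  }
  getVariables() {
    return { text: this.props.text };
  }
  getFatQuery() {
    return Relay.QL`
      fragment on AddDraftPayload {
        author {
          drafts
          draftCount
        }
        edge
      }
    `;
  }
}
```

A mutation document like the one below is then sent to the *local* layer, while the `author { name }` part of its payload is resolved by the *server* schema: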
306 | 307 | ``` 308 | mutation { 309 | addDraft(input: $input) { # client field 310 | author { 311 | name # server field 312 | draftCount 313 | } 314 | edge { 315 | node { 316 | title 317 | } 318 | } 319 | } 320 | } 321 | ``` 322 | 323 | Merge Script 324 | ------------ 325 | 326 | Here is the full script: 327 | 328 | ```js 329 | import fs from 'fs'; 330 | import path from 'path'; 331 | 332 | import localSchema from '../data/local/schema.json'; 333 | import serverSchema from '../data/server/schema.json'; 334 | 335 | import {createCompositeSchema} from 'relay-composite-network-layer/lib/merge'; 336 | 337 | const {schema,config} = createCompositeSchema({ 338 | server: serverSchema, 339 | local: localSchema 340 | }, { 341 | queryType: 'Query', 342 | mutationType: 'Mutation' 343 | }); 344 | 345 | fs.writeFileSync( 346 | path.join(__dirname, '../data/', 'schema.json'), 347 | JSON.stringify(schema, null, 2) 348 | ); 349 | 350 | fs.writeFileSync( 351 | path.join(__dirname, '../data/', 'config.json'), 352 | JSON.stringify(config, null, 2) 353 | ); 354 | ``` 355 | 356 | 357 | 358 | TODO 359 | ==== 360 | 361 | - chained mutation 362 | - AddTodo / DeleteDraft 363 | 364 | - remove graphql as a peer 365 | -------------------------------------------------------------------------------- /src/merge/index.js: -------------------------------------------------------------------------------- 1 | import {difference,intersect,into,pairs,pick,set,setIn,update,union,values} from '../utils'; 2 | 3 | // Schema = { 4 | // name: string, 5 | // queryType: string, 6 | // mutationType: string, 7 | // subscriptionType: string, 8 | // types: {[string]: GraphQLIntrospectionType} 9 | // } 10 | 11 | // Config = { 12 | // queryType: string, 13 | // mutationType: string, 14 | // subscriptionType: string, 15 | // extensions: {[typeName]: {[fieldName]: schemaName}} 16 | // } 17 | 18 | const ROOT_TYPES = ['queryType', 'mutationType', 'subscriptionType']; 19 | 20 | export const mergeSchemas = (schemaMap, options) => { 21 | 22 | assertOptionsValid(options); 23 | 24 | const {schema,extensions} = Object.keys(schemaMap).reduce(({schema,extensions}, key) => { 25 | const source = jsonToSchema(key, schemaMap[key]); 26 | return Object.keys(source.types).reduce(({schema, extensions}, typeName) => { 27 | const {type,extensions:typeExtensions} = mergeType(schema, source, typeName); 28 | return { 29 | schema: setIn(schema, ['types', type.name], type), 30 | extensions: update(extensions, type.name, exs => mergeExtensions(exs, typeExtensions)) 31 | }; 32 | }, {schema,extensions}); 33 | }, {schema: emptySchema(options), extensions: { }}); 34 | 35 | return { 36 | schema: { 37 | data: { 38 | __schema: { 39 | ...into({}, pairs(pick(schema, ...ROOT_TYPES)).map(([type,name]) => [type, {name}])), 40 | types: values(schema.types) 41 | } 42 | } 43 | }, 44 | config: { 45 | ...pick(schema, ...ROOT_TYPES), 46 | extensions 47 | } 48 | }; 49 | 50 | } 51 | 52 | export const createCompositeSchema = (schemaMap, options) => mergeSchemas(schemaMap, options) 53 | 54 | // empty object => undefined 55 | const mergeExtensions = (exsA, exsB) => { 56 | const exs = {...exsA, ...exsB}; 57 | if (Object.keys(exs).length > 0) { 58 | return exs; 59 | } 60 | } 61 | 62 | const mergeType = (destinationSchema, sourceSchema, typeName) => { 63 | const source = sourceSchema.types[typeName]; 64 | 65 | if (implementsNode(source)) { 66 | return mergeExtendableType(destinationSchema, sourceSchema, typeName); 67 | } else if (source.name === sourceSchema.queryType) { 68 | return 
mergeQueryType(destinationSchema, sourceSchema, typeName); 69 | } else if (source.name === sourceSchema.mutationType) { 70 | return mergeMutationType(destinationSchema, sourceSchema, typeName); 71 | } else if (source.name === sourceSchema.subscriptionType) { 72 | return mergeSubscriptionType(destinationSchema, sourceSchema, typeName); 73 | } else if (source.name === 'Node') { 74 | return mergeNodeType(destinationSchema, sourceSchema, typeName); 75 | } else { 76 | const destination = destinationSchema.types[typeName]; 77 | 78 | assertEquivalent(destination, source); 79 | 80 | return {type: source}; 81 | } 82 | } 83 | 84 | const mergeExtendableType = (destinationSchema, sourceSchema, typeName) => { 85 | const destination = destinationSchema.types[typeName]; 86 | const source = sourceSchema.types[typeName]; 87 | 88 | const fields = source.fields.filter(f => f.name !== 'id'); 89 | const extensions = into({}, fields.map(f => [f.name, sourceSchema.name])); 90 | 91 | if (destination) { 92 | return { 93 | type: mergeFields(destination, fields), 94 | extensions 95 | } 96 | } else { 97 | return { 98 | type: source, 99 | extensions: into({}, fields.map(f => [f.name, sourceSchema.name])) 100 | } 101 | } 102 | } 103 | 104 | const mergeNodeType = (destinationSchema, sourceSchema, typeName) => { 105 | // TODO: merge the possible types 106 | const destination = destinationSchema.types[typeName]; 107 | const source = sourceSchema.types[typeName]; 108 | 109 | if (destination) { 110 | return {type:destination} 111 | } else { 112 | return {type:source} 113 | } 114 | 115 | } 116 | 117 | const mergeQueryType = (destinationSchema, sourceSchema, sourceTypeName) => { 118 | return extendType(destinationSchema, destinationSchema.queryType, sourceSchema, sourceTypeName, ['node']); 119 | } 120 | 121 | const mergeMutationType = (destinationSchema, sourceSchema, sourceTypeName) => { 122 | return extendType(destinationSchema, destinationSchema.mutationType, sourceSchema, sourceTypeName); 123 | } 124 | 125 | const mergeSubscriptionType = (destinationSchema, sourceSchema, sourceTypeName) => { 126 | return extendType(destinationSchema, destinationSchema.subscriptionType, sourceSchema, sourceTypeName); 127 | } 128 | 129 | 130 | const extendType = (destinationSchema, destinationTypeName, sourceSchema, sourceTypeName, exclude = []) => { 131 | 132 | const destination = destinationSchema.types[destinationTypeName]; 133 | const source = sourceSchema.types[sourceTypeName]; 134 | 135 | const fields = source.fields.filter(f => !exclude.includes(f.name)); 136 | const extensions = into({}, fields.map(f => [f.name, sourceSchema.name])); 137 | 138 | if (destination) { 139 | return { 140 | type: mergeFields(destination, fields), 141 | extensions 142 | }; 143 | } else { 144 | return { 145 | type: set(source, 'name', destinationTypeName), 146 | extensions 147 | }; 148 | } 149 | 150 | } 151 | 152 | const mergeFields = (type, fields) => { 153 | const duplicateFields = intersect(type.fields.map(f => f.name), fields.map(f => f.name)); 154 | if (duplicateFields.length > 0) { 155 | throw new Error(`Invalid Merge : type ${type.name} has definitions with duplicate fields: ${duplicateFields.join(', ')}`); 156 | } 157 | 158 | return update(type, 'fields', fs => [...fs, ...fields]); 159 | } 160 | 161 | export const jsonToSchema = (name, schemaJson) => { 162 | const schema = schemaJson.data.__schema; 163 | 164 | // annoying but can't object.map so wutevs 165 | const rootTypes = into({}, 166 | pairs(pick(schema, ...ROOT_TYPES)) 167 | .map(([k,v]) => 
[k, v.name]) 168 | ); 169 | 170 | const typeMap = into({}, schema.types.map(type => [type.name, type])); 171 | 172 | return { 173 | name, 174 | ...rootTypes, 175 | types: typeMap 176 | }; 177 | 178 | } 179 | 180 | export const emptySchema = options => { 181 | const rootTypes = pick(options, ...ROOT_TYPES); 182 | return { 183 | ...rootTypes, 184 | types: { } 185 | }; 186 | } 187 | 188 | const implementsNode = type => { 189 | const interfaces = type.interfaces || []; 190 | return interfaces.some(i => i.name === 'Node'); 191 | } 192 | 193 | const OPTION_KEYS = ['queryType', 'mutationType']; 194 | const REQUIRED_OPTIONS = ['queryType']; 195 | 196 | const assertOptionsValid = options => { 197 | const invalidOptionKeys = difference(Object.keys(options), OPTION_KEYS); 198 | 199 | if (invalidOptionKeys.length > 0) { 200 | throw new Error(`Invalid Options : unknown option(s) : ${invalidOptionKeys.join(', ')}`); 201 | } 202 | 203 | const missingRequiredKeys = difference(REQUIRED_OPTIONS, Object.keys(options)); 204 | 205 | if (missingRequiredKeys.length > 0) { 206 | throw new Error(`Invalid Options : missing required option(s) : ${missingRequiredKeys.join(', ')}`); 207 | } 208 | 209 | } 210 | 211 | /// HIDEOUS BUT UNDERSTANDABLE /// 212 | 213 | const assertEquivalent = (destination, source) => { 214 | if (!destination) { 215 | return; 216 | } 217 | 218 | if (source.kind !== destination.kind) { 219 | throw new Error(`Merge Exception : type ${typeName} has definitions of different kinds : ${destination.kind}, ${source.kind}`); 220 | } 221 | 222 | const kind = destination.kind; 223 | 224 | switch(kind) { 225 | case 'UNION': 226 | throw new Error(`Merge Exception : merging UNION types is not supported : ${destination.name}`); 227 | case 'INTERFACE': 228 | throw new Error(`Merge Exception : merging INTERFACE types is not supported : ${destination.name}`); 229 | case 'OBJECT': 230 | assertObjectsEquivalent(destination, source); 231 | break; 232 | case 'INPUT_OBJECT': 233 | assertInputObjectsEquivalent(destination, source); 234 | break; 235 | case 'ENUM': 236 | assertEnumsEquivalent(destination, source); 237 | break; 238 | case 'SCALAR': 239 | // nothing to assert for scalars 240 | break; 241 | default: 242 | throw new Error(`Merge Exception : unsupported type kind ${kind} : file issue please, thanks friend!`); 243 | } 244 | } 245 | 246 | 247 | // OBJECT 248 | // kind 249 | // name 250 | // description 251 | // fields 252 | // inputFields 253 | // interfaces 254 | // enumValues 255 | // possibleTypes 256 | 257 | const assertObjectsEquivalent = (objectA, objectB) => { 258 | const unmatchedFields = symmetricDifference(objectA.fields, objectB.fields, 'name'); 259 | 260 | if (unmatchedFields.length > 0) { 261 | throw new Error(`Invalid Merge : OBJECT type ${objectA.name} has duplicate definitions with different fields : ${unmatchedFields.join(', ')}.`); 262 | } 263 | 264 | const fieldError = objectA.fields.reduce((error, fieldA) => { 265 | if (error) { 266 | return error; 267 | } else { 268 | const fieldB = objectB.fields.find(f => f.name === fieldA.name); 269 | return validateFieldsEquivalent(fieldA, fieldB); 270 | } 271 | }, null); 272 | 273 | if (fieldError) { 274 | throw new Error(`Invalid Merge : OBJECT type ${objectA.name} has non-equivalent fields : ${fieldError}`); 275 | } 276 | 277 | } 278 | 279 | const assertInputObjectsEquivalent = (objectA, objectB) => { 280 | const unmatchedFields = symmetricDifference(objectA.inputFields, objectB.inputFields, 'name'); 281 | 282 | if (unmatchedFields.length > 0) { 
283 | throw new Error(`Invalid Merge : INPUT_OBJECT type ${objectA.name} has duplicate definitions with different fields : ${unmatchedFields.join(', ')}.`); 284 | } 285 | 286 | const fieldError = objectA.inputFields.reduce((error, fieldA) => { 287 | if (error) { 288 | return error; 289 | } else { 290 | const fieldB = objectB.inputFields.find(f => f.name === fieldA.name); 291 | return validateFieldsEquivalent(fieldA, fieldB); 292 | } 293 | }, null); 294 | 295 | if (fieldError) { 296 | throw new Error(`Invalid Merge : INPUT_OBJECT type ${objectA.name} has non-equivalent fields : ${fieldError}`); 297 | } 298 | 299 | } 300 | 301 | const assertEnumsEquivalent = (enumA, enumB) => { 302 | const unmatchedValues = symmetricDifference(enumA.enumValues, enumB.enumValues, 'name'); 303 | 304 | if (unmatchedValues.length > 0) { 305 | throw new Error(`Invalid Merge : ENUM type ${enumA.name} has duplicate definitions with different values : ${unmatchedValues.join(', ')}.`); 306 | } 307 | 308 | } 309 | 310 | 311 | // FIELD 312 | // name 313 | // description 314 | // args 315 | // type 316 | // isDeprecated 317 | // deprecationReason 318 | 319 | const validateFieldsEquivalent = (fieldA, fieldB) => { 320 | // make sure args equivalent 321 | const unmatchedArgs = symmetricDifference(fieldA.args, fieldB.args, 'name'); 322 | 323 | if (unmatchedArgs.length > 0) { 324 | return `field ${fieldA.name} has duplicate definitions with different args : ${unmatchedArgs.join(', ')}.`; 325 | } 326 | 327 | const argError = fieldA.args.reduce((error, argA) => { 328 | if (error) { 329 | return error; 330 | } else { 331 | const argB = fieldB.args.find(a => a.name === argA.name); 332 | return validateArgsEquivalent(argA, argB); 333 | } 334 | }, null); 335 | 336 | if (argError) { 337 | return `field ${fieldA.name} non-equivalent args : ${argError}`; 338 | } 339 | 340 | // make sure type equivalent 341 | const typeError = validateTypesEquivalent(fieldA.type, fieldB.type); 342 | if (typeError) { 343 | return `field ${fieldA.name} non-equivalent types : ${typeError}`; 344 | } 345 | 346 | } 347 | 348 | 349 | // ARG 350 | // name 351 | // description 352 | // type 353 | // defaultValue 354 | 355 | const validateArgsEquivalent = (argA, argB) => { 356 | 357 | if (argA.defaultValue !== argB.defaultValue) { 358 | return `args have different default values : ${argA.defaultValue}, ${argB.defaultValue}`; 359 | } 360 | 361 | const typeError = validateTypesEquivalent(argA.type, argB.type); 362 | if (typeError) { 363 | return `args non-equivalent types : ${typeError}`; 364 | } 365 | } 366 | 367 | 368 | // TYPE 369 | // kind 370 | // name 371 | // ofType 372 | 373 | const validateTypesEquivalent = (typeA, typeB) => { 374 | if (!typeA && !typeB) { 375 | return null; 376 | } 377 | 378 | if (!typeA || !typeB) { 379 | return `types do not match : ${typeA}, ${typeB}`; 380 | } 381 | 382 | if (typeA.kind !== typeB.kind) { 383 | return `type kinds do not match : ${typeA.kind}, ${typeB.kind}`; 384 | } 385 | 386 | if (typeA.name !== typeB.name) { 387 | return `type names do not match : ${typeA.name}, ${typeB.name}`; 388 | } 389 | 390 | const typeOfError = validateTypesEquivalent(typeA.ofType, typeB.ofType); 391 | if (typeOfError) { 392 | return `type typeOf do not match : ${typeOfError}`; 393 | } 394 | } 395 | 396 | 397 | const symmetricDifference = (thingsA, thingsB, idProp) => { 398 | const idsA = thingsA.map(t => t[idProp]); 399 | const idsB = thingsB.map(t => t[idProp]); 400 | 401 | return union( 402 | difference(idsA, idsB), 403 | difference(idsB, 
idsA) 404 | ); 405 | } 406 | --------------------------------------------------------------------------------