├── .gitignore ├── .travis.yml ├── LICENSE ├── Makefile ├── README.md ├── apps ├── sw_core │ ├── .gitignore │ ├── priv │ │ └── sw.schema │ └── src │ │ ├── Makefile │ │ ├── sw_core.app.src │ │ ├── sw_core_app.erl │ │ ├── sw_core_db.erl │ │ ├── sw_core_db.hrl │ │ ├── sw_core_enum.erl │ │ ├── sw_core_faction.erl │ │ ├── sw_core_film.erl │ │ ├── sw_core_id.erl │ │ ├── sw_core_mutation.erl │ │ ├── sw_core_object.erl │ │ ├── sw_core_paginate.erl │ │ ├── sw_core_person.erl │ │ ├── sw_core_planet.erl │ │ ├── sw_core_query.erl │ │ ├── sw_core_scalar.erl │ │ ├── sw_core_species.erl │ │ ├── sw_core_starship.erl │ │ ├── sw_core_sup.erl │ │ ├── sw_core_type.erl │ │ └── sw_core_vehicle.erl └── sw_web │ ├── .gitignore │ ├── priv │ └── site │ │ ├── assets │ │ ├── graphiql.css │ │ ├── graphiql.js │ │ └── graphiql.min.js │ │ └── index.html │ └── src │ ├── Makefile │ ├── sw_web.app.src │ ├── sw_web_app.erl │ ├── sw_web_graphql_handler.erl │ ├── sw_web_response.erl │ └── sw_web_sup.erl ├── config ├── sys.config └── vm.args ├── db └── FALLBACK.BUP ├── doc ├── Makefile ├── book.asciidoc ├── code.asciidoc ├── enum_resolution.asciidoc ├── errors.asciidoc ├── getting_started.asciidoc ├── graphiql.asciidoc ├── images │ └── graphiql.png ├── introduction.asciidoc ├── object_resolution.asciidoc ├── relay_modern.asciidoc ├── scalar_resolution.asciidoc ├── schema.asciidoc ├── security.asciidoc ├── system_tour.asciidoc ├── terms.asciidoc ├── transports.asciidoc ├── tricks.asciidoc ├── type_resolution.asciidoc └── why_graphql.asciidoc ├── fixtures ├── films.json ├── people.json ├── planets.json ├── species.json ├── starships.json ├── transport.json └── vehicles.json ├── images └── graphiql.png ├── index.html ├── rebar.config ├── rebar.lock ├── talks └── euc-2017 │ ├── Makefile │ ├── compiler.dot │ ├── graphql.pdf │ └── graphql.tex └── test ├── Makefile ├── sw_SUITE.erl └── sw_SUITE_data ├── advanced.query ├── advanced.result ├── bwing.query ├── bwing.result ├── enum.query ├── enum.result ├── 
faction.query ├── faction.result ├── films.json ├── first.query ├── first.result ├── mutation.input ├── mutation.query ├── mutation.result ├── people.json ├── planets.json ├── species.json ├── starships.json ├── transport.json └── vehicles.json /.gitignore: -------------------------------------------------------------------------------- 1 | _build 2 | /doc/book.html 3 | /doc/book.xml 4 | /talks/euc-2017/compiler.pdf 5 | /talks/euc-2017/compiler.png 6 | /talks/euc-2017/graphql.aux 7 | /talks/euc-2017/graphql.log 8 | /talks/euc-2017/graphql.nav 9 | /talks/euc-2017/graphql.out 10 | /talks/euc-2017/graphql.snm 11 | /talks/euc-2017/graphql.toc 12 | /talks/euc-2017/graphql.vrb 13 | talks/euc-2017/auto/ 14 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: erlang 2 | sudo: false 3 | 4 | os: linux 5 | otp_release: 19.3 6 | 7 | matrix: 8 | include: 9 | - os: linux 10 | otp_release: 19.3 11 | - os: linux 12 | otp_release: 20.1 13 | 14 | script: "make compile documentation test dialyzer REBAR=./rebar3" 15 | 16 | before_install: 17 | - gem install asciidoctor 18 | 19 | cache: 20 | directories: 21 | - $HOME/.cache/rebar3 22 | 23 | install: 24 | - wget https://s3.amazonaws.com/rebar3/rebar3 && chmod +x rebar3 25 | 26 | notifications: 27 | recipients: 28 | - jesper.louis.andersen@gmail.com 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2016,2017 ShopGun ApS. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | REBAR=rebar3 2 | 3 | .PHONY: compile shell-schema release dialyzer publish documentation test 4 | 5 | compile: 6 | $(REBAR) compile 7 | 8 | ## Rebar3 advertises that its shell command boots the system with a 9 | ## changed path: 10 | ## 11 | ## Start a shell with project and deps preloaded similar to 12 | ## 'erl -pa ebin -pa deps/*/ebin'. 13 | ## 14 | ## It doesn't. It also starts the applications. 
We don't want 15 | ## the applications started, hence this little blurb: 16 | shell-schema: 17 | erl -pa `$(REBAR) path` -name sw@127.0.0.1 18 | 19 | release: 20 | $(REBAR) release 21 | 22 | dialyzer: 23 | $(REBAR) dialyzer 24 | 25 | test: 26 | $(REBAR) ct 27 | 28 | DOC_SOURCES = $(wildcard doc/*.asciidoc) 29 | IMAGE_SOURCES = $(wildcard doc/images/*) 30 | doc/book.html: $(DOC_SOURCES) 31 | asciidoctor doc/book.asciidoc 32 | 33 | index.html: doc/book.html 34 | cp doc/book.html index.html 35 | 36 | documentation: doc/book.html 37 | publish: index.html 38 | touch doc/book.asciidoc # Bump the last-updated-label 39 | cp -r $(IMAGE_SOURCES) images 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://travis-ci.org/shopgun/graphql-erlang-tutorial.svg?branch=master)](https://travis-ci.org/shopgun/graphql-erlang-tutorial) 2 | 3 | 4 | # A tutorial for GraphQL-Erlang 5 | 6 | This repository contains a tutorial for the graphql-erlang system. It 7 | implements (a subset of) the SWAPI as an example project in order to 8 | clarify how the system is supposed to be used in a full 9 | implementation. The idea is that it can be used as a starting point for 10 | your own implementation of your own GraphQL schema. 11 | 12 | # Documentation 13 | 14 | The primary URL for the document is: 15 | 16 | https://shopgun.github.io/graphql-erlang-tutorial/ 17 | 18 | To read the latest version of the documentation, go there and start 19 | reading! 20 | 21 | ## Building 22 | 23 | To build this software you need: 24 | 25 | * rebar3 26 | * Erlang/OTP - Version 19.3.3 was used in preparing this document 27 | 28 | The needed dependencies will be pulled in as part of building the 29 | software. 
30 | 31 | To build the documentation you need: 32 | 33 | * asciidoctor - We use asciidoctor's tagging feature to refer to 34 | source code in the repository inside the documentation. I installed 35 | it with `gem install asciidoctor` 36 | 37 | In addition you need: 38 | 39 | * A decent amount of Erlang knowledge. This document doesn't explain 40 | Erlang in any way, and assumes prior knowledge. 41 | * A decent amount of web knowledge. 42 | * Some GraphQL knowledge. If you don't know what GraphQL is, this 43 | document may not be the best initial exposition. Some terminology 44 | is taken for granted in advance. On the other hand, this document 45 | explains how GraphQL fits into the Erlang world. 46 | -------------------------------------------------------------------------------- /apps/sw_core/.gitignore: -------------------------------------------------------------------------------- 1 | .rebar3 2 | _* 3 | .eunit 4 | *.o 5 | *.beam 6 | *.plt 7 | *.swp 8 | *.swo 9 | .erlang.cookie 10 | ebin 11 | log 12 | erl_crash.dump 13 | .rebar 14 | logs 15 | _build 16 | .idea 17 | rebar3.crashdump 18 | -------------------------------------------------------------------------------- /apps/sw_core/priv/sw.schema: -------------------------------------------------------------------------------- 1 | # tag::scalarDateTime[] 2 | scalar DateTime 3 | # end::scalarDateTime[] 4 | 5 | # tag::enumEpisode[] 6 | enum Episode { 7 | PHANTOM 8 | CLONES 9 | SITH 10 | NEWHOPE 11 | EMPIRE 12 | JEDI 13 | } 14 | # end::enumEpisode[] 15 | 16 | # tag::nodeInterface[] 17 | +description(text: "Relay Modern Node Interface") 18 | interface Node { 19 | +description(text: "Unique Identity of a Node") 20 | id : ID! 21 | } 22 | # end::nodeInterface[] 23 | 24 | # tag::transportInterface[] 25 | interface Transport { 26 | id : ID! 
27 | edited : DateTime 28 | consumables : String 29 | name : String 30 | created : DateTime 31 | cargoCapacity : Float 32 | passengers : String 33 | maxAtmospheringSpeed : Int 34 | crew : String 35 | length : Float 36 | model : String 37 | costInCredits : Float 38 | manufacturers : [String] 39 | } 40 | # end::transportInterface[] 41 | 42 | type PageInfo { 43 | hasNextPage : Boolean! 44 | hasPreviousPage : Boolean! 45 | } 46 | 47 | type Vehicle implements Node, Transport { 48 | id : ID! 49 | name : String 50 | model : String 51 | vehicleClass : String 52 | manufacturers : [String] 53 | costInCredits : Float 54 | length : Float 55 | crew : String 56 | passengers : String 57 | maxAtmospheringSpeed : Int 58 | cargoCapacity : Float 59 | consumables : String 60 | created: DateTime 61 | edited: DateTime 62 | pilotConnection( 63 | after : String 64 | first : Int 65 | before : String 66 | last : Int) : VehiclePilotsConnection 67 | filmConnection( 68 | after : String 69 | first : Int 70 | before: String 71 | last: Int) : VehicleFilmsConnection 72 | } 73 | 74 | # tag::starshipObject[] 75 | +description(text: "Representation of Star Ships") 76 | type Starship implements Node, Transport { 77 | id : ID! 
78 | name : String 79 | model : String 80 | starshipClass : String 81 | manufacturers : [String] 82 | costInCredits : Float 83 | length : Float 84 | crew : String 85 | passengers : String 86 | maxAtmospheringSpeed : Int 87 | hyperdriveRating : Float 88 | MGLT : Int 89 | cargoCapacity : Float 90 | consumables : String 91 | created: DateTime 92 | edited: DateTime 93 | # end::starshipObject[] 94 | pilotConnection( 95 | after : String 96 | first : Int 97 | before : String 98 | last : Int) : StarshipPilotsConnection 99 | filmConnection( 100 | after : String 101 | first : Int 102 | before: String 103 | last: Int) : StarshipFilmsConnection 104 | } 105 | 106 | # tag::filmObject[] 107 | type Film implements Node { 108 | title : String 109 | episode : Episode 110 | episodeID : Int 111 | openingCrawl : String 112 | director : String 113 | producers : [String] 114 | releaseDate : DateTime 115 | speciesConnection(after: String, first: Int, 116 | before: String, last: Int) 117 | : FilmSpeciesConnection 118 | starshipConnection(after: String, first: Int, 119 | before: String, last: Int) 120 | : StarshipConnection 121 | vehicleConnection(after: String, first: Int, 122 | before: String, last: Int) 123 | : FilmVehicleConnection 124 | planetConnection(after: String, first: Int, 125 | before: String, last: Int) 126 | : FilmPlanetConnection 127 | characterConnection(after: String, first: Int, 128 | before: String, last: Int) 129 | : FilmCharactersConnection 130 | 131 | created : DateTime 132 | edited : DateTime 133 | id : ID! 
134 | } 135 | # end::filmObject[] 136 | 137 | # tag::speciesObject[] 138 | type Species implements Node { 139 | name : String 140 | classification : String 141 | designation : String 142 | averageHeight : String 143 | averageLifespan : String 144 | eyeColors : [String] 145 | hairColors : [String] 146 | skinColors : [String] 147 | language : String 148 | homeworld : Planet 149 | personConnection(after: String, first: Int, 150 | before: String, last: Int) 151 | : SpeciesPeopleConnection 152 | filmConnection(after: String, first: Int, 153 | before: String, last: Int) 154 | : SpeciesFilmConnection 155 | created : DateTime 156 | edited : DateTime 157 | id : ID! 158 | } 159 | # end::speciesObject[] 160 | 161 | # tag::planetObject[] 162 | type Planet implements Node { 163 | name : String 164 | diameter : Int 165 | rotationPeriod : Int 166 | orbitalPeriod : Int 167 | gravity : String 168 | population : Int 169 | climate : String 170 | terrains : [String] 171 | surfaceWater : Int 172 | filmConnection(after: String, first: Int, 173 | before: String, last: Int) 174 | : PlanetFilmsConnection 175 | residentConnection(after: String, first: Int, 176 | before: String, last: Int) 177 | : PlanetResidentsConnection 178 | created : DateTime 179 | edited : DateTime 180 | id : ID! 181 | } 182 | # end::planetObject[] 183 | 184 | type Person implements Node { 185 | name : String 186 | birthYear : String 187 | eyeColor : String 188 | gender : String 189 | hairColor : String 190 | height : Int 191 | mass : Float 192 | skinColor : String 193 | homeworld : Planet 194 | species : Species 195 | created : DateTime 196 | edited : DateTime 197 | id : ID! 198 | } 199 | 200 | # tag::queryObject[] 201 | type Query { 202 | +description(text: "Relay Modern specification Node fetcher") 203 | node(id : ID!) : Node 204 | +description(text: "Fetch a starship with a given Id") 205 | starship(id : ID!) 
: Starship 206 | allStarships : [Starship] 207 | allPlanets : [Planet] 208 | allPeople : [Person] 209 | allVehicles : [Vehicle] 210 | allSpecies : [Species] 211 | allFilms : [Film] 212 | filmByEpisode(episode: Episode) : Film! 213 | } 214 | # end::queryObject[] 215 | 216 | type Faction implements Node { 217 | id : ID! 218 | name : String 219 | ships(first: Int, after: String, 220 | last: Int, before: String) 221 | : StarshipConnection 222 | } 223 | 224 | # tag::mutationObject[] 225 | type Mutation { 226 | introduceFaction(input: IntroduceFactionInput!) 227 | : IntroduceFactionPayload 228 | introduceStarship(input: IntroduceStarshipInput!) 229 | : IntroduceStarshipPayload 230 | } 231 | # end::mutationObject[] 232 | 233 | ## -- MUTATION OBJECTS ---------- 234 | 235 | input IntroduceFactionInput { 236 | clientMutationId : String 237 | name : String! 238 | } 239 | 240 | type IntroduceFactionPayload { 241 | clientMutationId : String 242 | faction : Faction 243 | } 244 | 245 | # tag::introduceStarshipInput[] 246 | input IntroduceStarshipInput { 247 | clientMutationId : String 248 | 249 | name : String 250 | model : String 251 | starshipClass : String! 252 | manufacturers : [String] = [] # <1> 253 | costInCredits : Float! 254 | length : Float! 255 | crew : String! 256 | 257 | faction : ID! 258 | } 259 | # end::introduceStarshipInput[] 260 | 261 | # tag::introduceStarshipPayload[] 262 | type IntroduceStarshipPayload { 263 | clientMutationId : String 264 | faction : Faction 265 | starship : Starship 266 | } 267 | # end::introduceStarshipPayload[] 268 | 269 | ## -- CONNECTION OBJECTS -------- 270 | ## 271 | ## This section implements connection objects according to the relay modern conventions 272 | 273 | type PlanetFilmsConnection { 274 | pageInfo : PageInfo! 275 | edges : [PlanetFilmsEdge] 276 | totalCount : Int 277 | } 278 | 279 | type PlanetFilmsEdge { 280 | node : Film 281 | cursor : String! 282 | } 283 | 284 | type PlanetResidentsConnection { 285 | pageInfo : PageInfo! 
286 | edges : [PlanetResidentsEdge] 287 | totalCount : Int 288 | } 289 | 290 | type PlanetResidentsEdge { 291 | node : Person 292 | cursor : String! 293 | } 294 | 295 | type FilmSpeciesConnection { 296 | pageInfo : PageInfo! 297 | edges : [FilmSpeciesEdge] 298 | totalCount : Int 299 | species : [Species] 300 | } 301 | 302 | type FilmSpeciesEdge { 303 | node : Species 304 | cursor : String! 305 | } 306 | 307 | type SpeciesFilmConnection { 308 | pageInfo : PageInfo! 309 | edges : [SpeciesFilmEdge] 310 | totalCount : Int 311 | } 312 | 313 | type SpeciesFilmEdge { 314 | node : Film 315 | cursor : String! 316 | } 317 | 318 | type SpeciesPeopleConnection { 319 | pageInfo : PageInfo! 320 | edges : [SpeciesPeopleEdge] 321 | totalCount : Int 322 | } 323 | 324 | type SpeciesPeopleEdge { 325 | node : Person 326 | cursor : String! 327 | } 328 | 329 | type FilmPlanetConnection { 330 | pageInfo : PageInfo! 331 | edges : [FilmPlanetEdge] 332 | totalCount : Int 333 | } 334 | 335 | type FilmPlanetEdge { 336 | node : Planet 337 | cursor : String! 338 | } 339 | 340 | type VehiclePilotsConnection { 341 | pageInfo : PageInfo! 342 | edges : [VehiclePilotsEdge] 343 | totalCount : Int 344 | } 345 | 346 | type VehiclePilotsEdge { 347 | node : Person 348 | cursor : String! 349 | } 350 | 351 | type VehicleFilmsConnection { 352 | pageInfo : PageInfo! 353 | edges : [VehicleFilmsEdge] 354 | totalCount : Int 355 | } 356 | 357 | type VehicleFilmsEdge { 358 | node : Film 359 | cursor : String! 360 | } 361 | 362 | type StarshipConnection { 363 | pageInfo : PageInfo! 364 | edges : [StarshipEdge] 365 | totalCount : Int 366 | } 367 | 368 | type StarshipEdge { 369 | node : Starship 370 | cursor : String! 371 | } 372 | 373 | type FilmVehicleConnection { 374 | pageInfo : PageInfo! 375 | edges : [FilmVehicleEdge] 376 | totalCount : Int 377 | } 378 | 379 | type FilmVehicleEdge { 380 | node : Vehicle 381 | cursor : String! 382 | } 383 | 384 | type FilmCharactersConnection { 385 | pageInfo : PageInfo! 
386 | edges : [FilmCharactersEdge] 387 | totalCount : Int 388 | } 389 | 390 | type FilmCharactersEdge { 391 | node : Person 392 | cursor : String! 393 | } 394 | 395 | type StarshipPilotsConnection { 396 | pageInfo : PageInfo 397 | edges : [StarshipPilotsEdge] 398 | totalCount : Int 399 | } 400 | 401 | type StarshipPilotsEdge { 402 | node : Person 403 | cursor : String! 404 | } 405 | 406 | type StarshipFilmsConnection { 407 | pageInfo : PageInfo 408 | edges : [StarshipFilmsEdge] 409 | totalCount : Int 410 | } 411 | 412 | type StarshipFilmsEdge { 413 | node : Film 414 | cursor : String! 415 | } 416 | 417 | -------------------------------------------------------------------------------- /apps/sw_core/src/Makefile: -------------------------------------------------------------------------------- 1 | compile: 2 | $(MAKE) -C ../../.. compile 3 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core.app.src: -------------------------------------------------------------------------------- 1 | {application, sw_core, 2 | [{description, "An OTP application"}, 3 | {vsn, "0.1.0"}, 4 | {registered, []}, 5 | {mod, { sw_core_app, []}}, 6 | {applications, 7 | [kernel, 8 | stdlib, 9 | mnesia, 10 | 11 | iso8601, 12 | jsx, 13 | lager, 14 | graphql 15 | ]}, 16 | {env,[ 17 | {schema_file, "sw.schema"} 18 | ]}, 19 | {modules, []}, 20 | 21 | {maintainers, []}, 22 | {licenses, []}, 23 | {links, []} 24 | ]}. 25 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_app.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %% @doc sw_core public API 3 | %% @end 4 | %%%------------------------------------------------------------------- 5 | 6 | -module(sw_core_app). 7 | 8 | -behaviour(application). 9 | 10 | %% Application callbacks 11 | -export([start/2, stop/1]). 
12 | 13 | %%==================================================================== 14 | %% API 15 | %%==================================================================== 16 | 17 | start(_StartType, _StartArgs) -> 18 | {ok, Pid} = sw_core_sup:start_link(), 19 | ok = load_schema(), 20 | ok = sw_core_db:wait_for_tables(), 21 | {ok, Pid}. 22 | 23 | %%-------------------------------------------------------------------- 24 | stop(_State) -> 25 | ok. 26 | 27 | %%==================================================================== 28 | %% Internal functions 29 | %%==================================================================== 30 | %% tag::schemaMapping[] 31 | mapping_rules() -> 32 | #{ 33 | scalars => #{ default => sw_core_scalar }, 34 | interfaces => #{ default => sw_core_type }, 35 | unions => #{ default => sw_core_type }, 36 | enums => #{ 'Episode' => sw_core_enum, 37 | default => sw_core_enum }, 38 | objects => #{ 39 | 'Planet' => sw_core_planet, 40 | 'Film' => sw_core_film, 41 | 'Species' => sw_core_species, 42 | 'Vehicle' => sw_core_vehicle, 43 | 'Starship' => sw_core_starship, 44 | 'Person' => sw_core_person, 45 | 46 | 'Faction' => sw_core_faction, 47 | 48 | 'Query' => sw_core_query, 49 | 'Mutation' => sw_core_mutation, 50 | default => sw_core_object } 51 | }. 52 | %% end::schemaMapping[] 53 | 54 | %% tag::loadSchema[] 55 | load_schema() -> 56 | {ok, SchemaFile} = application:get_env(sw_core, schema_file), 57 | PrivDir = code:priv_dir(sw_core), 58 | {ok, SchemaData} = file:read_file( 59 | filename:join(PrivDir, SchemaFile)), 60 | Mapping = mapping_rules(), 61 | ok = graphql:load_schema(Mapping, SchemaData), 62 | ok = setup_root(), 63 | ok = graphql:validate_schema(), 64 | ok. 65 | %% end::loadSchema[] 66 | 67 | %% tag::setupRoot[] 68 | setup_root() -> 69 | Root = {root, 70 | #{ query => 'Query', 71 | mutation => 'Mutation', 72 | interfaces => ['Node'] 73 | }}, 74 | ok = graphql:insert_schema_definition(Root), 75 | ok. 
76 | %% end::setupRoot[] 77 | 78 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_db.hrl: -------------------------------------------------------------------------------- 1 | %% Database Schema definition files 2 | 3 | %% tag::factionRecord[] 4 | -record(faction, 5 | {id :: integer(), 6 | name :: integer()}). 7 | %% end::factionRecord[] 8 | 9 | %% tag::starshipRecord[] 10 | -record(starship, 11 | {id :: integer(), 12 | pilots :: [integer()], 13 | mglt :: integer(), 14 | starship_class :: binary(), 15 | hyperdrive_rating :: float() 16 | }). 17 | %% end::starshipRecord[] 18 | 19 | %% tag::transportRecord[] 20 | -record(transport, 21 | {id :: integer(), 22 | edited :: calendar:datetime(), 23 | consumables :: binary() | undefined, 24 | name :: binary(), 25 | created :: calendar:datetime(), 26 | cargo_capacity :: float() | nan, 27 | passengers :: binary() | undefined, 28 | crew :: binary(), 29 | length :: float(), 30 | model :: binary(), 31 | cost :: float(), 32 | max_atmosphering_speed :: integer(), 33 | manufacturers :: [binary()], 34 | faction = undefined :: undefined | integer() 35 | }). 36 | %% end::transportRecord[] 37 | 38 | -record(film, 39 | {id :: integer(), 40 | edited :: calendar:datetime(), 41 | created :: calendar:datetime(), 42 | starships :: [integer()], 43 | species :: [integer()], 44 | vehicles :: [integer()], 45 | planets :: [integer()], 46 | producers :: [binary()], 47 | title :: binary(), 48 | episode :: atom(), 49 | episode_id :: integer(), 50 | director :: binary(), 51 | release_date :: binary(), 52 | opening_crawl :: binary(), 53 | characters :: [integer()] }). 
54 | 55 | -record(species, 56 | {id :: integer(), 57 | edited :: binary(), 58 | created :: binary(), 59 | classification :: binary(), 60 | name :: binary(), 61 | designation :: binary(), 62 | eye_colors :: [binary()], 63 | people :: [integer()], 64 | skin_colors :: [binary()], 65 | language :: binary(), 66 | hair_colors :: [binary()], 67 | homeworld :: binary(), 68 | average_lifespan :: integer(), 69 | average_height :: integer() 70 | }). 71 | 72 | -record(person, 73 | {id :: integer(), 74 | edited :: calendar:datetime(), 75 | name :: binary(), 76 | created :: calendar:datetime(), 77 | gender :: binary(), 78 | skin_color :: binary(), 79 | hair_color :: binary(), 80 | height :: integer(), 81 | eye_color :: binary(), 82 | mass :: float() | nan, 83 | homeworld :: integer(), 84 | birth_year :: binary() 85 | }). 86 | 87 | %% tag::planetRecord[] 88 | -record(planet, 89 | {id :: integer(), 90 | edited :: calendar:datetime(), 91 | climate :: binary(), 92 | surface_water :: integer(), 93 | name :: binary(), 94 | diameter :: integer(), 95 | rotation_period :: integer(), 96 | created :: calendar:datetime(), 97 | terrains :: [binary()], 98 | gravity :: binary(), 99 | orbital_period :: integer() | nan, 100 | population :: integer() | nan 101 | }). 102 | %% end::planetRecord[] 103 | 104 | -record(vehicle, 105 | {id :: integer(), 106 | vehicle_class :: binary(), 107 | pilots :: [integer()]}). 108 | 109 | -record(sequences, {key, value}). 110 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_enum.erl: -------------------------------------------------------------------------------- 1 | %% tag::coreEnum[] 2 | -module(sw_core_enum). 3 | 4 | -export([input/2, output/2]). 
5 | 6 | %% Input mapping <1> 7 | input(<<"Episode">>, <<"PHANTOM">>) -> {ok, 'PHANTOM'}; 8 | input(<<"Episode">>, <<"CLONES">>) -> {ok, 'CLONES'}; 9 | input(<<"Episode">>, <<"SITH">>) -> {ok, 'SITH'}; 10 | input(<<"Episode">>, <<"NEWHOPE">>) -> {ok, 'NEWHOPE'}; 11 | input(<<"Episode">>, <<"EMPIRE">>) -> {ok, 'EMPIRE'}; 12 | input(<<"Episode">>, <<"JEDI">>) -> {ok, 'JEDI'}. 13 | 14 | %% Output mapping <2> 15 | output(<<"Episode">>, Episode) -> 16 | {ok, atom_to_binary(Episode, utf8)}. 17 | %% end::coreEnum[] 18 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_faction.erl: -------------------------------------------------------------------------------- 1 | -module(sw_core_faction). 2 | -include("sw_core_db.hrl"). 3 | -include_lib("stdlib/include/qlc.hrl"). 4 | 5 | -export([execute/4]). 6 | -export([introduce/2]). 7 | 8 | %% tag::starshipExecute[] 9 | execute(_Ctx, #faction { id = ID, 10 | name = Name }, Field, Args) -> 11 | case Field of 12 | <<"id">> -> 13 | {ok, sw_core_id:encode({'Faction', ID})}; 14 | <<"name">> -> 15 | {ok, Name}; 16 | <<"ships">> -> 17 | Txn = fun() -> 18 | QH = qlc:q( 19 | [#{ starship => S, 20 | transport => T } || 21 | S <- mnesia:table(starship), 22 | T <- mnesia:table(transport), 23 | T#transport.faction == ID, 24 | S#starship.id == T#transport.id]), 25 | qlc:e(QH) 26 | end, 27 | {atomic, Records} = mnesia:transaction(Txn), 28 | sw_core_paginate:select(Records, Args) 29 | end. 30 | 31 | %% tag::introduce[] 32 | introduce(_Ctx, #{ <<"name">> := Name }) -> 33 | ID = sw_core_db:nextval(faction), % <1> 34 | Faction = #faction { id = ID, name = Name }, % <2> 35 | Txn = fun() -> 36 | mnesia:write(Faction) % <3> 37 | end, 38 | case mnesia:transaction(Txn) of 39 | {atomic, ok} -> 40 | {ok, Faction} % <4> 41 | end. 
42 | %% end::introduce[] 43 | 44 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_film.erl: -------------------------------------------------------------------------------- 1 | -module(sw_core_film). 2 | -include("sw_core_db.hrl"). 3 | -include_lib("stdlib/include/qlc.hrl"). 4 | 5 | -export([execute/4]). 6 | 7 | %% tag::filmExecute[] 8 | execute(_Ctx, #film{} = Film, Field, Args) -> 9 | case Field of 10 | %% Plain Fields 11 | <<"id">> -> {ok, sw_core_id:encode({'Film', Film#film.id})}; 12 | <<"edited">> -> {ok, Film#film.edited}; 13 | <<"created">> -> {ok, Film#film.created}; 14 | <<"producers">> -> {ok, [{ok, P} || P <- Film#film.producers]}; 15 | <<"title">> -> {ok, Film#film.title}; 16 | <<"episode">> -> {ok, Film#film.episode}; 17 | <<"episodeID">> -> {ok, Film#film.episode_id}; 18 | <<"director">> -> {ok, Film#film.director}; 19 | <<"releaseDate">> -> {ok, Film#film.release_date}; 20 | <<"openingCrawl">> -> {ok, Film#film.opening_crawl}; 21 | <<"planetConnection">> -> 22 | #film { planets = Planets } = Film, 23 | Txn = fun() -> 24 | [mnesia:read(planet, P) || P <- Planets] 25 | end, 26 | {atomic, Records} = mnesia:transaction(Txn), 27 | sw_core_paginate:select(lists:append(Records), Args); 28 | <<"characterConnection">> -> 29 | #film { characters = Characters } = Film, 30 | Txn = fun() -> 31 | [mnesia:read(person, P) || P <- Characters] 32 | end, 33 | {atomic, Records} = mnesia:transaction(Txn), 34 | sw_core_paginate:select(lists:append(Records), Args); 35 | <<"speciesConnection">> -> 36 | #film { species = Species } = Film, 37 | Txn = fun() -> 38 | [mnesia:read(species, S) || S <- Species] 39 | end, 40 | {atomic, Records} = mnesia:transaction(Txn), 41 | sw_core_paginate:select(lists:append(Records), Args); 42 | <<"starshipConnection">> -> 43 | #film { starships = Starships } = Film, 44 | Txn = fun() -> 45 | QH = qlc:q([#{ starship => Ss, 46 | transport => Tspt } || 47 | Ss <- mnesia:table(starship), 
48 | Tspt <- mnesia:table(transport), 49 | Ss#starship.id == Tspt#transport.id, 50 | lists:member(Ss#starship.id, Starships)]), 51 | qlc:e(QH) 52 | end, 53 | {atomic, Records} = mnesia:transaction(Txn), 54 | sw_core_paginate:select(Records, Args); 55 | <<"vehicleConnection">> -> 56 | #film { vehicles = Vehicles } = Film, 57 | Txn = fun() -> 58 | QH = qlc:q([#{ vehicle => V, 59 | transport => Tspt } || 60 | V <- mnesia:table(vehicle), 61 | Tspt <- mnesia:table(transport), 62 | V#vehicle.id == Tspt#transport.id, 63 | lists:member(V#vehicle.id, Vehicles)]), 64 | qlc:e(QH) 65 | end, 66 | {atomic, Records} = mnesia:transaction(Txn), 67 | sw_core_paginate:select(Records, Args) 68 | end. 69 | %% end::filmExecute[] 70 | 71 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_id.erl: -------------------------------------------------------------------------------- 1 | -module(sw_core_id). 2 | 3 | -export([encode/1, decode/1]). 4 | 5 | -spec encode({atom(), integer()}) -> binary(). 6 | %% tag::idEncode[] 7 | encode({Tag, ID}) -> 8 | BinTag = atom_to_binary(Tag, utf8), 9 | IDStr = integer_to_binary(ID), 10 | base64:encode(<>). 11 | %% end::idEncode[] 12 | 13 | -spec decode(binary()) -> {error, Reason} | {ok, {atom(), integer()}} 14 | when 15 | Reason :: term(). 16 | %% tag::idDecode[] 17 | decode(Input) -> 18 | try 19 | Decoded = base64:decode(Input), 20 | case binary:split(Decoded, <<":">>) of 21 | [BinTag, IDStr] -> 22 | {ok, {binary_to_existing_atom(BinTag, utf8), 23 | binary_to_integer(IDStr)}}; 24 | _ -> 25 | exit(invalid) 26 | end 27 | catch 28 | _:_ -> 29 | {error, invalid_decode} 30 | end. 31 | %% end::idDecode[] 32 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_mutation.erl: -------------------------------------------------------------------------------- 1 | -module(sw_core_mutation). 2 | 3 | -export([execute/4]). 
4 | 5 | %% tag::execute[] 6 | execute(Ctx, _, Field, #{ <<"input">> := Input}) -> 7 | with_client_mutation(Ctx, Field, Input). 8 | 9 | with_client_mutation(Ctx, Field, Input) -> 10 | {CM, Rest} = maps:take(<<"clientMutationId">>, Input), 11 | case execute_mutation(Ctx, Field, Rest) of 12 | {ok, Payload} -> 13 | {ok, Payload#{ <<"clientMutationId">> => CM }}; 14 | {error, Reason} -> 15 | {error, Reason} 16 | end. 17 | %% end::execute[] 18 | 19 | %% tag::executeMutation[] 20 | execute_mutation(Ctx, <<"introduceFaction">>, Input) -> 21 | {ok, Faction} = sw_core_faction:introduce(Ctx, Input), 22 | {ok, #{ <<"faction">> => Faction }}; 23 | execute_mutation(Ctx, <<"introduceStarship">>, Input) -> 24 | {ok, Faction, Starship} = sw_core_starship:introduce(Ctx, Input), 25 | {ok, #{ <<"faction">> => Faction, 26 | <<"starship">> => Starship }}; 27 | execute_mutation(_Ctx, _Other, _) -> 28 | {error, invalid_mutation}. 29 | %% end::executeMutation[] 30 | 31 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_object.erl: -------------------------------------------------------------------------------- 1 | -module(sw_core_object). 2 | -export([execute/4]). 3 | 4 | %% Assume we are given a map(). Look up the field in the map. If not 5 | %% present, return the value null. 6 | execute(_Ctx, Obj, Field, _Args) -> 7 | {ok, maps:get(Field, Obj, null)}. 8 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_paginate.erl: -------------------------------------------------------------------------------- 1 | -module(sw_core_paginate). 2 | 3 | -export([select/2]). 4 | 5 | -define(DEFAULT_FIRST, 5). 6 | 7 | select(Elements, Args) -> 8 | try 9 | {ok, select_(Elements, Args)} 10 | catch 11 | throw:Err -> 12 | {error, Err} 13 | end. 

%% tag::paginate[]
%% select_/2 implements the Relay Cursor Connections pagination
%% algorithm over a materialized list of Elements. The wrapper
%% select/2 catches throws (e.g. bad_cursor_decode) and turns them
%% into {error, Reason}.
select_(Elements,
        #{ <<"first">> := F,
           <<"last">> := L,
           <<"after">> := After,
           <<"before">> := Before }) ->
    {First, Last} = defaults(F, L), % <1>
    Count = length(Elements), % <2>

    %% applyCursorsToEdges <3>
    Positions = lists:seq(1, Count),
    Sliced = apply_cursors_to_edges(After, Before,
                                    lists:zip(Elements, Positions)),
    Window = edges_to_return(First, Last, Sliced), % <4>
    Edges = format(Window),

    %% Build PageInfo <5>
    PageInfo = #{
      <<"hasNextPage">> => has_next(Sliced, First),
      <<"hasPreviousPage">> => has_previous(Sliced, Last)
    },

    %% Return result <6>
    #{
      <<"totalCount">> => Count,
      <<"edges">> => Edges,
      <<"pageInfo">> => PageInfo
    }.
%% end::paginate[]

%% If the client supplied neither "first" nor "last", default to the
%% first ?DEFAULT_FIRST edges; otherwise pass the arguments through.
defaults(null, null) -> {?DEFAULT_FIRST, null};
defaults(F, L) -> {F, L}.

%% tag::pageInfo[]
has_previous(_Sliced, null) -> false;
has_previous(Sliced, Last) -> length(Sliced) > Last.

has_next(_Sliced, null) -> false;
has_next(Sliced, First) -> length(Sliced) > First.
%% end::pageInfo[]

%% Wrap each {Element, Position} pair as an edge map carrying an opaque
%% cursor; each edge is tagged {ok, _} as the GraphQL executor expects.
format([]) -> [];
format([{Elem, Pos}|Xs]) ->
    X = #{ <<"node">> => Elem,
           <<"cursor">> => pack_cursor(Pos)},
    [{ok, X} | format(Xs)].

%% tag::edgesToReturn[]
%% Bug fix: the original had no clause for the case where the client
%% supplies BOTH "first" and "last", which crashed with function_clause.
%% The Relay specification says to apply "first" and then "last" in
%% that situation, which is what the new first clause does.
edges_to_return(First, Last, Window) when First /= null, Last /= null ->
    edges_to_return(null, Last,
                    edges_to_return(First, null, Window));
edges_to_return(First, null, Window) ->
    Sz = length(Window),
    case Sz - First of
        K when K =< 0 -> Window;
        K when K > 0 ->
            {Res, _} = lists:split(First, Window),
            Res
    end;
edges_to_return(null, Last, Window) ->
    %% "last" is "first" applied from the rear of the window.
    lists:reverse(
      edges_to_return(Last, null, lists:reverse(Window))).
%% end::edgesToReturn[]

%% end::edgesToReturn[]

%% tag::applyCursorsToEdges[]
%% Slice the list of {Element, Position} pairs down to the window
%% requested by the "after"/"before" cursors. A cursor packs the
%% absolute 1-based position of an element (see pack_cursor/1).
%%
%% Bug fix: the original handled the combined case by first splitting
%% at the "after" position and then splitting the already-shortened
%% list at the absolute "before" position, so the "before" cut was
%% misaligned whenever both cursors were given. Filtering on absolute
%% positions fixes that, and also avoids the lists:split/2 crash on a
%% decodable but out-of-range cursor.
%%
%% NOTE(review): as in the original single-cursor behavior, the element
%% at the "before" position itself is kept (Pos =< Hi). The Relay spec
%% arguably wants it excluded — confirm against the test suite before
%% tightening this.
apply_cursors_to_edges(After, Before, Elements) ->
    Lo = case After of
             null -> 0;
             _ -> unpack_cursor(After)
         end,
    Hi = case Before of
             null -> unbounded;
             _ -> unpack_cursor(Before)
         end,
    [E || {_, Pos} = E <- Elements,
          Pos > Lo,
          Hi == unbounded orelse Pos =< Hi].
%% end::applyCursorsToEdges[]

%% tag::packCursor[]
pack_cursor(Pos) ->
    base64:encode(integer_to_binary(Pos)).
%% end::packCursor[]

%% Decode an opaque cursor back to its integer position. Any failure
%% (bad base64, non-integer payload) becomes a throw which select/2
%% reports as {error, bad_cursor_decode}.
unpack_cursor(Cursor) ->
    try
        P = base64:decode(Cursor),
        binary_to_integer(P)
    catch
        _:_ ->
            throw(bad_cursor_decode)
    end.
--------------------------------------------------------------------------------
/apps/sw_core/src/sw_core_person.erl:
--------------------------------------------------------------------------------
-module(sw_core_person).
-include("sw_core_db.hrl").
-include_lib("stdlib/include/qlc.hrl").

-export([execute/4]).

%% tag::planetExecute[]
%% NOTE(review): this tag says "planetExecute" but documents the Person
%% resolver, and it has no matching end:: marker in this file. It looks
%% copy-pasted from sw_core_planet — confirm against the asciidoc
%% includes before renaming it to personExecute.
%%
%% Resolve a field on a Person object. Scalar fields are read straight
%% out of the #person{} record; "homeworld" and "species" are looked up
%% in Mnesia inside a transaction.
execute(_Ctx, #person { id = PersonId } = Person, Field, _Args) ->
    case Field of
        <<"id">> -> {ok, sw_core_id:encode({'Person', PersonId})};
        <<"edited">> -> {ok, Person#person.edited};
        <<"name">> -> {ok, Person#person.name};
        <<"created">> -> {ok, Person#person.created};
        <<"gender">> -> {ok, Person#person.gender};
        <<"skinColor">> -> {ok, Person#person.skin_color};
        <<"hairColor">> -> {ok, Person#person.hair_color};
        <<"height">> -> {ok, Person#person.height};
        <<"eyeColor">> -> {ok, Person#person.eye_color};
        %% mass may be the sentinel atom 'nan'; floatify maps it to null.
        <<"mass">> -> {ok, floatify(Person#person.mass)};
        <<"homeworld">> ->
            %% Every person row is expected to reference an existing
            %% planet; a missing one makes the [Planet] match fail and
            %% crashes this resolver.
            Txn = fun() ->
                          mnesia:read(planet, Person#person.homeworld)
                  end,
            {atomic, [Planet]} = mnesia:transaction(Txn),
            {ok, Planet};
        <<"species">> ->
            %% Find the (at most one) species whose people list contains
            %% this person; no membership yields null.
            Txn = fun() ->
                          QH = qlc:q([S || S <- mnesia:table(species),
                                           lists:member(PersonId, S#species.people)]),
                          qlc:e(QH)
                  end,
            case mnesia:transaction(Txn) of
                {atomic, [Species]} -> {ok, Species};
                {atomic, []} -> {ok, null}
            end;
        <<"birthYear">> -> {ok, Person#person.birth_year}
    end.

%% Map the record sentinel 'nan' to GraphQL null; everything else is
%% coerced to a float.
floatify(nan) -> null;
floatify(I) -> float(I).
--------------------------------------------------------------------------------
/apps/sw_core/src/sw_core_planet.erl:
--------------------------------------------------------------------------------
-module(sw_core_planet).
-include("sw_core_db.hrl").
-include_lib("stdlib/include/qlc.hrl").

-export([execute/4]).

%% tag::planetExecute[]
%% Resolve a field on a Planet object. Scalar fields come straight from
%% the #planet{} record; the *Connection fields run Mnesia/QLC queries
%% and hand the result to sw_core_paginate:select/2.
execute(_Ctx, #planet { id = PlanetId } = Planet, Field, Args) ->
    case Field of
        <<"id">> -> {ok, sw_core_id:encode({'Planet', Planet#planet.id})};
        <<"edited">> -> {ok, Planet#planet.edited};
        <<"climate">> -> {ok, Planet#planet.climate};
        <<"surfaceWater">> -> {ok, Planet#planet.surface_water};
        <<"name">> -> {ok, Planet#planet.name};
        %% integer/1 maps the 'nan' sentinel to GraphQL null.
        <<"diameter">> -> {ok, integer(Planet#planet.diameter)};
        <<"rotationPeriod">> -> {ok, integer(Planet#planet.rotation_period)};
        %% end::planetExecute[]
        <<"filmConnection">> ->
            %% Films whose planets list contains this planet.
            Txn = fun() ->
                          QH = qlc:q([F || F <- mnesia:table(film),
                                           lists:member(PlanetId, F#film.planets)]),
                          qlc:e(QH)
                  end,
            {atomic, Films} = mnesia:transaction(Txn),
            sw_core_paginate:select(Films, Args);
        %% tag::residentConnection[]
        <<"residentConnection">> ->
            %% People whose homeworld is this planet.
            Txn = fun() ->
                          QH = qlc:q([P || P <- mnesia:table(person),
                                           P#person.homeworld == PlanetId]),
                          qlc:e(QH)
                  end,
            {atomic, People} = mnesia:transaction(Txn),
            sw_core_paginate:select(People, Args);
        %% end::residentConnection[]
        <<"created">> -> {ok, Planet#planet.created};
        <<"terrains">> ->
            %% List elements must be individually tagged {ok, _} for the
            %% GraphQL executor.
            Terrains = Planet#planet.terrains,
            {ok, [{ok, T} || T <- Terrains]};
        <<"gravity">> -> {ok, Planet#planet.gravity};
        <<"orbitalPeriod">> -> {ok, integer(Planet#planet.orbital_period)};
        <<"population">> -> {ok, integer(Planet#planet.population)}
    end.

%% Pass integers through; map the 'nan' sentinel to GraphQL null.
integer(I) when is_integer(I) -> I;
integer(nan) -> null.

--------------------------------------------------------------------------------
/apps/sw_core/src/sw_core_query.erl:
--------------------------------------------------------------------------------
-module(sw_core_query).
-include("sw_core_db.hrl").
-include_lib("stdlib/include/qlc.hrl").

-export([execute/4]).

%% tag::execute[]
%% Resolve the top-level Query fields. "node" and "starship" decode an
%% opaque global ID; the all* fields list a whole Mnesia table.
execute(_Ctx, _DummyObj, <<"node">>, #{ <<"id">> := ID }) ->
    load_node(any, ID);
%% end::execute[]
execute(_Ctx, _DummyObj, <<"starship">>, #{ <<"id">> := ID }) ->
    %% Same as "node" but restricted to the Starship type.
    load_node(['Starship'], ID);
execute(_Ctx, _DummyObj, <<"allPlanets">>, _Args) ->
    {atomic, Planets} = mnesia:transaction(load_all(planet)),
    {ok, Planets};

execute(_Ctx, _DummyObj, <<"allStarships">>, _Args) ->
    {atomic, Starships} = mnesia:transaction(load_all(starship)),
    {ok, Starships};
execute(_Ctx, _DummyObj, <<"allPeople">>, _Args) ->
    {atomic, People} = mnesia:transaction(load_all(person)),
    {ok, People};
execute(_Ctx, _DummyObj, <<"allVehicles">>, _Args) ->
    {atomic, Vehicles} = mnesia:transaction(load_all(vehicle)),
    {ok, Vehicles};
execute(_Ctx, _DummyObj, <<"allSpecies">>, _Args) ->
    {atomic, Species} = mnesia:transaction(load_all(species)),
    {ok, Species};
execute(_Ctx, _DummyObj, <<"filmByEpisode">>, #{ <<"episode">> := Episode }) ->
    {atomic, Films} = mnesia:transaction(load_film_by_episode(Episode)),
    case Films of
        [] ->
            %% This shouldn't happen
            {ok, null};
        [Film] ->
            {ok, Film}
    end;
execute(_Ctx, _DummyObj, <<"allFilms">>, _Args) ->
    {atomic, Films} = mnesia:transaction(load_all(film)),
    {ok, Films}.

%% Build a transaction fun listing every row of a table, each row
%% tagged {ok, _} for the GraphQL executor. Vehicles and starships are
%% stored split across two tables and are joined with their transport
%% row on matching ids.
load_all(vehicle) ->
    fun() ->
            QH = qlc:q([{ok, #{ vehicle => V, transport => T }} ||
                           V <- mnesia:table(vehicle),
                           T <- mnesia:table(transport),
                           V#vehicle.id == T#transport.id]),
            qlc:e(QH)
    end;
load_all(starship) ->
    fun() ->
            QH = qlc:q([{ok, #{ starship => S, transport => T }} ||
                           S <- mnesia:table(starship),
                           T <- mnesia:table(transport),
                           S#starship.id == T#transport.id]),
            qlc:e(QH)
    end;
load_all(Tab) ->
    fun() ->
            QH = qlc:q([{ok, F} || F <- mnesia:table(Tab)]),
            qlc:e(QH)
    end.
63 | 64 | load_film_by_episode(Episode) -> 65 | fun() -> 66 | QH = qlc:q([F || F <- mnesia:table(film), 67 | F#film.episode == Episode ]), 68 | qlc:e(QH) 69 | end. 70 | 71 | 72 | 73 | %% tag::loadNode[] 74 | load_node(Types, ID) when is_binary(ID) -> 75 | case sw_core_id:decode(ID) of 76 | {ok, Decoded} -> 77 | load_node_(Types, Decoded); 78 | {error, Reason} -> 79 | {error, Reason} 80 | end. 81 | 82 | load_node_(any, {Type, MID}) -> 83 | sw_core_db:load(Type, MID); 84 | load_node_(TypeList, {Type, MID}) -> 85 | case lists:member(Type, TypeList) of 86 | true -> 87 | sw_core_db:load(Type, MID); 88 | false -> 89 | {error, wrong_type} 90 | end. 91 | %% end::loadNode[] 92 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_scalar.erl: -------------------------------------------------------------------------------- 1 | %% tag::scalarRepr[] 2 | -module(sw_core_scalar). 3 | 4 | -export([input/2, output/2]). 5 | 6 | input(<<"DateTime">>, Input) -> 7 | try iso8601:parse(Input) of 8 | DateTime -> {ok, DateTime} 9 | catch 10 | error:badarg -> 11 | {error, bad_date} 12 | end; 13 | input(_Type, Val) -> 14 | {ok, Val}. 15 | 16 | output(<<"DateTime">>, DateTime) -> 17 | {ok, iso8601:format(DateTime)}; 18 | output(_Type, Val) -> 19 | {ok, Val}. 20 | %% end::scalarRepr[] 21 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_species.erl: -------------------------------------------------------------------------------- 1 | -module(sw_core_species). 2 | -include("sw_core_db.hrl"). 3 | -include_lib("stdlib/include/qlc.hrl"). 4 | 5 | -export([execute/4]). 
6 | 7 | %% tag::execute[] 8 | execute(_Ctx, #species { id = Id } = Species, Field, Args) -> 9 | case Field of 10 | <<"id">> -> {ok, sw_core_id:encode({'Species', Id})}; 11 | <<"name">> -> {ok, Species#species.name}; 12 | <<"eyeColors">> -> 13 | {ok, 14 | [{ok, EC} || EC <- Species#species.eye_colors]}; 15 | %% end::execute[] 16 | <<"classification">> -> {ok, Species#species.classification}; 17 | <<"designation">> -> {ok, Species#species.designation}; 18 | <<"averageHeight">> -> {ok, Species#species.average_height}; 19 | <<"averageLifespan">> -> {ok, Species#species.average_lifespan}; 20 | <<"hairColors">> -> 21 | {ok, 22 | [{ok, EC} || EC <- Species#species.hair_colors]}; 23 | <<"skinColors">> -> 24 | {ok, 25 | [{ok, EC} || EC <- Species#species.skin_colors]}; 26 | <<"language">> -> {ok, Species#species.language}; 27 | <<"homeworld">> -> 28 | Txn = fun() -> 29 | mnesia:read(planet, Species#species.homeworld) 30 | end, 31 | %% Droids do not have homeworlds 32 | case mnesia:transaction(Txn) of 33 | {atomic, [Planet]} -> {ok, Planet}; 34 | {atomic, []} -> {ok, null} 35 | end; 36 | <<"personConnection">> -> 37 | Txn = fun() -> 38 | QH = qlc:q([P || P <- mnesia:table(person), 39 | lists:member(P#person.id, 40 | Species#species.people)]), 41 | qlc:e(QH) 42 | end, 43 | {atomic, People} = mnesia:transaction(Txn), 44 | sw_core_paginate:select(People, Args); 45 | <<"filmConnection">> -> 46 | Txn = fun() -> 47 | QH = qlc:q([F || P <- mnesia:table(person), 48 | lists:member(P#person.id, 49 | Species#species.people), 50 | F <- mnesia:table(film), 51 | lists:member(P#person.id, 52 | F#film.characters)]), 53 | qlc:e(qlc:sort(QH, [{unique, true}])) 54 | end, 55 | {atomic, Films} = mnesia:transaction(Txn), 56 | sw_core_paginate:select(Films, Args); 57 | <<"created">> -> {ok, Species#species.created}; 58 | <<"edited">> -> {ok, Species#species.edited} 59 | end. 
60 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_starship.erl: -------------------------------------------------------------------------------- 1 | -module(sw_core_starship). 2 | -include("sw_core_db.hrl"). 3 | -include_lib("stdlib/include/qlc.hrl"). 4 | 5 | -export([execute/4]). 6 | -export([introduce/2]). 7 | 8 | %% tag::starshipExecute[] 9 | execute(_Ctx, #{ starship := #starship { id = StarshipId } = Starship, 10 | transport := Transport }, Field, Args) -> 11 | case Field of 12 | <<"id">> -> 13 | {ok, sw_core_id:encode({'Starship', Starship#starship.id})}; 14 | <<"name">> -> {ok, Transport#transport.name}; 15 | <<"model">> -> {ok, Transport#transport.model}; 16 | <<"starshipClass">> -> {ok, Starship#starship.starship_class}; 17 | <<"costInCredits">> -> {ok, floatify(Transport#transport.cost)}; 18 | <<"length">> -> {ok, Transport#transport.length}; 19 | <<"crew">> -> {ok, Transport#transport.crew}; 20 | <<"passengers">> -> 21 | Result = case Transport#transport.passengers of 22 | undefined -> null; 23 | P -> P 24 | end, 25 | {ok, Result}; 26 | <<"manufacturers">> -> {ok, [{ok, M} || M <- Transport#transport.manufacturers]}; 27 | <<"maxAtmospheringSpeed">> -> 28 | {ok, Transport#transport.max_atmosphering_speed}; 29 | <<"hyperdriveRating">> -> 30 | {ok, Starship#starship.hyperdrive_rating}; 31 | <<"MGLT">> -> 32 | {ok, Starship#starship.mglt}; 33 | <<"cargoCapacity">> -> 34 | Capacity = Transport#transport.cargo_capacity, 35 | {ok, floatify(Capacity)}; 36 | <<"consumables">> -> {ok, 37 | case Transport#transport.consumables of 38 | undefined -> null; 39 | V -> V 40 | end}; 41 | <<"created">> -> {ok, Transport#transport.created}; 42 | <<"edited">> -> {ok, Transport#transport.edited}; 43 | %% end::starshipExecute[] 44 | <<"pilotConnection">> -> 45 | #starship { pilots = Pilots } = Starship, 46 | Txn = fun() -> 47 | [mnesia:read(person, P) || P <- Pilots] 48 | end, 49 | {atomic, Records} = 
mnesia:transaction(Txn), 50 | sw_core_paginate:select(lists:append(Records), Args); 51 | <<"filmConnection">> -> 52 | Txn = fun() -> 53 | QH = qlc:q([F || F <- mnesia:table(film), 54 | lists:member(StarshipId, F#film.starships)]), 55 | qlc:e(QH) 56 | end, 57 | {atomic, Records} = mnesia:transaction(Txn), 58 | sw_core_paginate:select(Records, Args) 59 | end. 60 | 61 | %% tag::introduce[] 62 | introduce(_Ctx, #{ <<"name">> := Name, 63 | <<"model">> := Model, 64 | <<"starshipClass">> := Class, 65 | <<"manufacturers">> := Manufacturers, 66 | <<"costInCredits">> := Cost, 67 | <<"length">> := Length, 68 | <<"crew">> := Crew, 69 | <<"faction">> := FactionInput }) -> 70 | ID = sw_core_db:nextval(transport), % <1> 71 | Transport = #transport { id = ID, 72 | name = Name, 73 | created = current_time(), 74 | edited = current_time(), 75 | crew = Crew, 76 | model = Model, 77 | cost = Cost, 78 | length = Length, 79 | passengers = undefined, 80 | consumables = undefined, 81 | max_atmosphering_speed = 0, 82 | cargo_capacity = nan, 83 | manufacturers = Manufacturers }, 84 | Starship = #starship { id = ID, 85 | pilots = [], 86 | mglt = 0, 87 | hyperdrive_rating = 0.0, 88 | starship_class = Class }, % <2> 89 | {ok, {'Faction', FactionID}} = 90 | sw_core_id:decode(FactionInput), % <3> 91 | case sw_core_db:load('Faction', FactionID) of % <4> 92 | {ok, #faction { id = FactionRef } = Faction} -> 93 | Txn = fun() -> 94 | ok = mnesia:write(Starship), 95 | ok = mnesia:write(Transport#transport { 96 | faction = FactionRef 97 | }), % <5> 98 | ok 99 | end, 100 | {atomic, ok} = mnesia:transaction(Txn), 101 | {ok, Faction, #{ starship => Starship, 102 | transport => Transport#transport { 103 | faction = FactionRef 104 | }}}; % <6> 105 | {error, Reason} -> 106 | {error, Reason} 107 | end. 108 | %% end::introduce[] 109 | 110 | 111 | floatify(nan) -> null; 112 | floatify(I) -> float(I). 113 | 114 | current_time() -> 115 | calendar:universal_time(). 
116 | 117 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_sup.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %% @doc sw_core top level supervisor. 3 | %% @end 4 | %%%------------------------------------------------------------------- 5 | 6 | -module(sw_core_sup). 7 | 8 | -behaviour(supervisor). 9 | 10 | %% API 11 | -export([start_link/0]). 12 | 13 | %% Supervisor callbacks 14 | -export([init/1]). 15 | 16 | -define(SERVER, ?MODULE). 17 | 18 | %%==================================================================== 19 | %% API functions 20 | %%==================================================================== 21 | 22 | start_link() -> 23 | supervisor:start_link({local, ?SERVER}, ?MODULE, []). 24 | 25 | %%==================================================================== 26 | %% Supervisor callbacks 27 | %%==================================================================== 28 | 29 | %% Child :: {Id,StartFunc,Restart,Shutdown,Type,Modules} 30 | init([]) -> 31 | {ok, { {one_for_all, 0, 1}, []} }. 32 | 33 | %%==================================================================== 34 | %% Internal functions 35 | %%==================================================================== 36 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_type.erl: -------------------------------------------------------------------------------- 1 | -module(sw_core_type). 2 | 3 | -include("sw_core_db.hrl"). 4 | -export([execute/1]). 
5 | 6 | %% tag::resolveType[] 7 | execute(#film{}) -> {ok, 'Film'}; 8 | execute(#person{}) -> {ok, 'Person'}; 9 | execute(#planet{}) -> {ok, 'Planet'}; 10 | execute(#species{}) -> {ok, 'Species'}; 11 | execute(#starship{}) -> {ok, 'Starship'}; 12 | execute(#transport{}) -> {ok, 'Transport'}; 13 | execute(#vehicle{}) -> {ok, 'Vehicle'}; 14 | execute(#faction{}) -> {ok, 'Faction'}; 15 | execute(#{ starship := _, transport := _ }) -> {ok, 'Starship'}; 16 | execute(#{ vehicle := _, transport := _ }) -> {ok, 'Vehicle'}; 17 | execute(_Otherwise) -> {error, unknown_type}. 18 | %% end::resolveType[] 19 | 20 | -------------------------------------------------------------------------------- /apps/sw_core/src/sw_core_vehicle.erl: -------------------------------------------------------------------------------- 1 | -module(sw_core_vehicle). 2 | -include("sw_core_db.hrl"). 3 | -include_lib("stdlib/include/qlc.hrl"). 4 | 5 | -export([execute/4]). 6 | 7 | %% tag::starshipExecute[] 8 | execute(_Ctx, #{ vehicle := #vehicle { id = VehicleId } = Vehicle, 9 | transport := Transport }, Field, Args) -> 10 | case Field of 11 | <<"id">> -> 12 | {ok, sw_core_id:encode({'Vehicle', Vehicle#vehicle.id})}; 13 | <<"name">> -> {ok, Transport#transport.name}; 14 | <<"model">> -> {ok, Transport#transport.model}; 15 | <<"vehicleClass">> -> {ok, Vehicle#vehicle.vehicle_class}; 16 | <<"costInCredits">> -> {ok, Transport#transport.cost}; 17 | <<"length">> -> {ok, Transport#transport.length}; 18 | <<"crew">> -> {ok, Transport#transport.crew}; 19 | <<"passengers">> -> {ok, Transport#transport.passengers}; 20 | <<"manufacturers">> -> {ok, [{ok, M} || M <- Transport#transport.manufacturers]}; 21 | <<"maxAtmospheringSpeed">> -> 22 | {ok, Transport#transport.max_atmosphering_speed}; 23 | <<"cargoCapacity">> -> {ok, Transport#transport.cargo_capacity}; 24 | <<"consumables">> -> {ok, Transport#transport.consumables}; 25 | <<"created">> -> {ok, Transport#transport.created}; 26 | <<"edited">> -> {ok, 
Transport#transport.edited}; 27 | %% end::starshipExecute[] 28 | <<"pilotConnection">> -> 29 | #vehicle { pilots = Pilots } = Vehicle, 30 | Txn = fun() -> 31 | [mnesia:read(person, P) || P <- Pilots] 32 | end, 33 | {atomic, Records} = mnesia:transaction(Txn), 34 | sw_core_paginate:select(lists:append(Records), Args); 35 | <<"filmConnection">> -> 36 | Txn = fun() -> 37 | QH = qlc:q([F || F <- mnesia:table(film), 38 | lists:member(VehicleId, F#film.vehicles)]), 39 | qlc:e(QH) 40 | end, 41 | {atomic, Records} = mnesia:transaction(Txn), 42 | sw_core_paginate:select(Records, Args) 43 | end. 44 | -------------------------------------------------------------------------------- /apps/sw_web/.gitignore: -------------------------------------------------------------------------------- 1 | .rebar3 2 | _* 3 | .eunit 4 | *.o 5 | *.beam 6 | *.plt 7 | *.swp 8 | *.swo 9 | .erlang.cookie 10 | ebin 11 | log 12 | erl_crash.dump 13 | .rebar 14 | logs 15 | _build 16 | .idea 17 | rebar3.crashdump 18 | -------------------------------------------------------------------------------- /apps/sw_web/priv/site/index.html: -------------------------------------------------------------------------------- 1 | 8 | 9 | 10 | 11 | 22 | 23 | 30 | 31 | 32 | 33 | 34 | 35 | 40 | 41 | 42 | 43 | 44 | Graphql Star Wars API 45 | 46 |
Loading...
47 | 149 | 150 | 151 | -------------------------------------------------------------------------------- /apps/sw_web/src/Makefile: -------------------------------------------------------------------------------- 1 | compile: 2 | $(MAKE) -C ../../.. compile 3 | -------------------------------------------------------------------------------- /apps/sw_web/src/sw_web.app.src: -------------------------------------------------------------------------------- 1 | {application, sw_web, 2 | [{description, "An OTP application"}, 3 | {vsn, "0.1.0"}, 4 | {registered, []}, 5 | {mod, { sw_web_app, []}}, 6 | {applications, 7 | [kernel, 8 | stdlib, 9 | 10 | sw_core, 11 | cowboy]}, 12 | {env,[ 13 | {http_port, 17290} 14 | ]}, 15 | {modules, []}, 16 | 17 | {maintainers, []}, 18 | {licenses, []}, 19 | {links, []} 20 | ]}. 21 | -------------------------------------------------------------------------------- /apps/sw_web/src/sw_web_app.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %% @doc sw_web public API 3 | %% @end 4 | %%%------------------------------------------------------------------- 5 | 6 | -module(sw_web_app). 7 | 8 | -behaviour(application). 9 | 10 | %% Application callbacks 11 | -export([start/2, stop/1]). 
12 | 13 | %%==================================================================== 14 | %% API 15 | %%==================================================================== 16 | 17 | start(_StartType, _StartArgs) -> 18 | %% tag::dispatcher[] 19 | Dispatch = 20 | cowboy_router:compile( 21 | [{'_', 22 | [{"/assets/[...]", cowboy_static, 23 | {priv_dir, sw_web, "site/assets"}}, 24 | {"/", sw_web_graphql_handler, 25 | {priv_file, sw_web, "site/index.html"}} 26 | ]}]), 27 | %% end::dispatcher[] 28 | {ok, Port} = application:get_env(sw_web, http_port), 29 | error_logger:info_msg("Starting HTTP listener on port ~p", [Port]), 30 | {ok, Pid} = sw_web_sup:start_link(), 31 | cowboy:start_clear(sw_http, 32 | [{port, Port}], 33 | #{env => #{dispatch => Dispatch}, 34 | stream_handlers => [cowboy_compress_h, cowboy_stream_h], 35 | %% Bump the default limit of 8000 to 65536 to allow us to submit 36 | %% slightly larger, human readable, query documents. The limit of 37 | %% 65536 is chosen to allow us to have 8 times bigger documents 38 | %% than the default where we hit the limit of 8000. If you are 39 | %% hitting the bumped limit you should probably consider splitting 40 | %% up your query document into two. 41 | %% 42 | %% Caveat: If you are testing on localhost you might not see the 43 | %% max limit have any effect since the socket might make the entire 44 | %% HTTP request available when cowboy does a gen_tcp:read(Socket, 0) 45 | %% and will ignore the limit. 46 | max_request_line_length => 65536, 47 | 48 | %% Bump the default limit of 4096 on Header lengths to 16384. The 49 | %% problem is we will eventually get a very large document as a 50 | %% referrer from GraphiQL and this will break the server side as it 51 | %% has to process through that header 52 | max_header_value_length => 16384 53 | } 54 | ), 55 | {ok, Pid}. 56 | 57 | %%-------------------------------------------------------------------- 58 | stop(_State) -> 59 | ok. 
60 | 61 | %%==================================================================== 62 | %% Internal functions 63 | %%==================================================================== 64 | -------------------------------------------------------------------------------- /apps/sw_web/src/sw_web_graphql_handler.erl: -------------------------------------------------------------------------------- 1 | %% tag::exports[] 2 | -module(sw_web_graphql_handler). 3 | 4 | %% Cowboy Handler Interface 5 | -export([init/2]). 6 | 7 | %% REST callbacks 8 | -export([ 9 | allowed_methods/2, 10 | resource_exists/2, 11 | content_types_provided/2, 12 | content_types_accepted/2, 13 | charsets_provided/2 14 | ]). 15 | 16 | %% Data input/output callbacks 17 | -export([ 18 | from_json/2, 19 | to_json/2, 20 | to_html/2 21 | ]). 22 | %% end::exports[] 23 | 24 | %% -- API --------------------------------------------------- 25 | %% tag::init[] 26 | init(Req, {priv_file, _, _} = PrivFile) -> 27 | {cowboy_rest, 28 | Req, 29 | #{ index_location => PrivFile }}. 30 | %% end::init[] 31 | 32 | %% tag::allowed_methods[] 33 | allowed_methods(Req, State) -> 34 | {[<<"GET">>, <<"POST">>], Req, State}. 35 | %% end::allowed_methods[] 36 | 37 | %% tag::content_types_accepted[] 38 | content_types_accepted(Req, State) -> 39 | {[ 40 | {{<<"application">>, <<"json">>, []}, from_json} 41 | ], Req, State}. 42 | %% end::content_types_accepted[] 43 | 44 | %% tag::content_types_provided[] 45 | content_types_provided(Req, State) -> 46 | {[ 47 | {{<<"application">>, <<"json">>, []}, to_json}, 48 | {{<<"text">>, <<"html">>, []}, to_html} 49 | ], Req, State}. 50 | %% end::content_types_provided[] 51 | 52 | %% tag::charsets_provided[] 53 | charsets_provided(Req, State) -> 54 | {[<<"utf-8">>], Req, State}. 
55 | %% end::charsets_provided[] 56 | 57 | %% tag::resource_exists[] 58 | resource_exists(#{ method := <<"GET">> } = Req, State) -> 59 | {true, Req, State}; 60 | resource_exists(#{ method := <<"POST">> } = Req, State) -> 61 | {false, Req, State}. 62 | %% end::resource_exists[] 63 | 64 | %% tag::to_html[] 65 | to_html(Req, #{ index_location := 66 | {priv_file, App, FileLocation}} = State) -> 67 | Filename = filename:join(code:priv_dir(App), FileLocation), 68 | {ok, Data} = file:read_file(Filename), 69 | {Data, Req, State}. 70 | %% end::to_html[] 71 | 72 | %% tag::json_processing[] 73 | json_request(Req, State) -> 74 | case gather(Req) of 75 | {error, Reason} -> 76 | err(400, Reason, Req, State); 77 | {ok, Req2, Decoded} -> 78 | run_request(Decoded, Req2, State) 79 | end. 80 | 81 | from_json(Req, State) -> json_request(Req, State). 82 | to_json(Req, State) -> json_request(Req, State). 83 | %% end::json_processing[] 84 | 85 | %% -- INTERNAL FUNCTIONS --------------------------------------- 86 | 87 | %% tag::run_request[] 88 | run_request(#{ document := undefined }, Req, State) -> 89 | err(400, no_query_supplied, Req, State); 90 | run_request(#{ document := Doc} = ReqCtx, Req, State) -> 91 | case graphql:parse(Doc) of 92 | {ok, AST} -> 93 | run_preprocess(ReqCtx#{ document := AST }, Req, State); 94 | {error, Reason} -> 95 | err(400, Reason, Req, State) 96 | end. 97 | %% end::run_request[] 98 | 99 | %% tag::run_preprocess[] 100 | run_preprocess(#{ document := AST } = ReqCtx, Req, State) -> 101 | try 102 | Elaborated = graphql:elaborate(AST), % <1> 103 | {ok, #{ 104 | fun_env := FunEnv, 105 | ast := AST2 }} = graphql:type_check(Elaborated), % <2> 106 | ok = graphql:validate(AST2), % <3> 107 | run_execute(ReqCtx#{ document := AST2, fun_env => FunEnv }, Req, State) 108 | catch 109 | throw:Err -> 110 | err(400, Err, Req, State) 111 | end. 

%% end::run_preprocess[]

%% tag::run_execute[]
%% Final pipeline stage: coerce the client-supplied variables against
%% the operation's parameter types, execute the document, JSON-encode
%% the response and reply with HTTP 200.
run_execute(#{ document := AST,
               fun_env := FunEnv,
               vars := Vars,
               operation_name := OpName }, Req, State) ->
    Coerced = graphql:type_check_params(FunEnv, OpName, Vars), % <1>
    Ctx = #{
      params => Coerced,
      operation_name => OpName },
    Response = graphql:execute(Ctx, AST), % <2>
    ResponseBody = sw_web_response:term_to_json(Response), % <3>
    Req2 = cowboy_req:set_resp_body(ResponseBody, Req), % <4>
    Reply = cowboy_req:reply(200, Req2),
    {stop, Reply, State}.
%% end::run_execute[]

%% tag::gather[]
%% Collect the GraphQL request parts (document, variables, operation
%% name) from the HTTP request. The request body must be valid JSON.
%% NOTE(review): Bindings are the cowboy router bindings — presumably
%% meant to carry GET-style parameters; confirm the router actually
%% binds <<"query">>/<<"variables">> keys.
gather(Req) ->
    {ok, Body, Req2} = cowboy_req:read_body(Req),
    Bindings = cowboy_req:bindings(Req2),
    try jsx:decode(Body, [return_maps]) of
        JSON ->
            gather(Req2, JSON, Bindings)
    catch
        error:badarg ->
            {error, invalid_json_body}
    end.

%% Params (router bindings) take precedence over the decoded Body when
%% both define the same part, because they come first in the list.
gather(Req, Body, Params) ->
    QueryDocument = document([Params, Body]),
    case variables([Params, Body]) of
        {ok, Vars} ->
            Operation = operation_name([Params, Body]),
            {ok, Req, #{ document => QueryDocument,
                         vars => Vars,
                         operation_name => Operation}};
        {error, Reason} ->
            {error, Reason}
    end.
%% end::gather[]

%% tag::document[]
%% Return the first <<"query">> entry found in the candidate maps, or
%% undefined when none of them carries a query document.
document([#{ <<"query">> := Q }|_]) -> Q;
document([_|Next]) -> document(Next);
document([]) -> undefined.

%% end::document[]

%% tag::variables[]
%% Extract the variables for the request. Variables may arrive as a
%% JSON-encoded binary (decoded here; JSON null counts as "none"), as
%% an already-decoded map, or as null; absence yields an empty map.
variables([#{ <<"variables">> := Vars} | _]) ->
  if
    is_binary(Vars) ->
      try jsx:decode(Vars, [return_maps]) of
        null -> {ok, #{}};
        JSON when is_map(JSON) -> {ok, JSON};
        _ -> {error, invalid_json}
      catch
        error:badarg ->
          {error, invalid_json}
      end;
    is_map(Vars) ->
      {ok, Vars};
    Vars == null ->
      {ok, #{}}
  end;
variables([_ | Next]) ->
    variables(Next);
variables([]) ->
    {ok, #{}}.
%% end::variables[]

%% tag::operation_name[]
%% Return the first <<"operationName">> found, or undefined.
operation_name([#{ <<"operationName">> := OpName } | _]) ->
    OpName;
operation_name([_ | Next]) ->
    operation_name(Next);
operation_name([]) ->
    undefined.
%% end::operation_name[]


%% tag::errors[]
%% Format any Erlang term as a GraphQL-style JSON error response and
%% stop the cowboy REST flow with the given HTTP status code.
err(Code, Msg, Req, State) ->
    Formatted = iolist_to_binary(io_lib:format("~p", [Msg])),
    Err = #{ type => error,
             message => Formatted },
    Body = jsx:encode(#{ errors => [Err] }),
    Req2 = cowboy_req:set_resp_body(Body, Req),
    Reply = cowboy_req:reply(Code, Req2),
    {stop, Reply, State}.
%% end::errors[]
--------------------------------------------------------------------------------
/apps/sw_web/src/sw_web_response.erl:
--------------------------------------------------------------------------------
-module(sw_web_response).
-export([term_to_json/1]).

%% Encode an arbitrary response term as JSON, first rewriting any
%% non-jsx-compatible sub-terms via fixup/1.
term_to_json(Term) ->
    jsx:encode(fixup(Term)).

%% Rewrite a term so jsx can encode it: numbers, atoms and binaries
%% pass through; lists and maps are recursed; anything else (tuples,
%% pids, refs, ...) is pretty-printed into a binary.
%% Ground types
fixup(Term) when is_number(Term) -> Term;
fixup(Term) when is_atom(Term) -> Term;
fixup(Term) when is_binary(Term) -> Term;
%% Compound types
fixup(Term) when is_list(Term) ->
    [fixup(T) || T <- Term];
fixup(Term) when is_map(Term) ->
    KVs = maps:to_list(Term),
    maps:from_list([{fixup_key(K), fixup(V)} || {K, V} <- KVs]);
fixup(Term) ->
    %% Every other term is transformed into a binary value
    iolist_to_binary(
      io_lib:format("~p", [Term])).

%% Map keys must end up binary for JSON object keys: keys whose fixup
%% is already a binary pass through, all others are pretty-printed.
fixup_key(Term) ->
    case fixup(Term) of
        T when is_binary(T) ->
            T;
        T ->
            iolist_to_binary(io_lib:format("~p", [T]))
    end.
--------------------------------------------------------------------------------
/apps/sw_web/src/sw_web_sup.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%% @doc sw_web top level supervisor.
%% @end
%%%-------------------------------------------------------------------

-module(sw_web_sup).

-behaviour(supervisor).

%% API
-export([start_link/0]).

%% Supervisor callbacks
-export([init/1]).

-define(SERVER, ?MODULE).

%%====================================================================
%% API functions
%%====================================================================

start_link() ->
    supervisor:start_link({local, ?SERVER}, ?MODULE, []).

%%====================================================================
%% Supervisor callbacks
%%====================================================================

%% Child :: {Id,StartFunc,Restart,Shutdown,Type,Modules}
%% No static children: the HTTP listener is started separately in
%% sw_web_app:start/2. Strategy one_for_all with 0 restarts allowed
%% within 1 second.
init([]) ->
    {ok, { {one_for_all, 0, 1}, []} }.
32 | 33 | %%==================================================================== 34 | %% Internal functions 35 | %%==================================================================== 36 | -------------------------------------------------------------------------------- /config/sys.config: -------------------------------------------------------------------------------- 1 | [ 2 | {sw_core, []}, 3 | 4 | %% Mnesia setup 5 | {mnesia, [ 6 | {dir, "./db"}, 7 | {schema_location, disc}]}, 8 | 9 | %% Lager configuration. 10 | {lager, [ 11 | {log_root, "./"}, 12 | 13 | % Console colors. 14 | {colored, true}, 15 | 16 | % Log handlers configuration. 17 | {handlers, [ 18 | {lager_console_backend, info}, 19 | 20 | % Error logging. 21 | {lager_file_backend, [ 22 | {file, "error.log"}, 23 | {level, error}, 24 | {date, "$D0"}, 25 | {size, 10485760}, 26 | {count, 5} 27 | ]}, 28 | 29 | % Info logging. 30 | {lager_file_backend, [ 31 | {file, "console.log"}, 32 | {level, info}, 33 | {date, "$D0"}, 34 | {size, 10485760}, 35 | {count, 5} 36 | ]}, 37 | 38 | % Debug logging. 39 | {lager_file_backend, [ 40 | {file, "debug.log"}, 41 | {level, debug}, 42 | {date, "$D0"}, 43 | {size, 10485760}, 44 | {count, 5} 45 | ]} 46 | ]} 47 | ]}, 48 | 49 | %% SASL configuration. 50 | {sasl, [ 51 | % Log file. 52 | {sasl_error_logger, {file, "log/sasl-error.log"}}, 53 | 54 | % Log type. 55 | {errlog_type, error}, 56 | 57 | % Log directory. 58 | {error_logger_mf_dir, "log/sasl"}, 59 | 60 | % 10 MB max file size. 61 | {error_logger_mf_maxbytes, 10485760}, 62 | 63 | % 5 files max. 64 | {error_logger_mf_maxfiles, 5} 65 | ]} 66 | ]. 
67 | 68 | -------------------------------------------------------------------------------- /config/vm.args: -------------------------------------------------------------------------------- 1 | -name sw@127.0.0.1 2 | -setcookie Chronteer7 3 | 4 | +K true 5 | +A 30 6 | 7 | # Deliberately set a fairly low count on Max number of open ports 8 | +Q 2048 9 | 10 | -------------------------------------------------------------------------------- /db/FALLBACK.BUP: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jlouis/graphql-erlang-tutorial/c2cebedf45e8b2bd3eb798977ebb72e13b9bfd05/db/FALLBACK.BUP -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | documentation: 2 | $(MAKE) -C .. documentation 3 | -------------------------------------------------------------------------------- /doc/book.asciidoc: -------------------------------------------------------------------------------- 1 | = {project} Tutorial 2 | Jesper Louis Andersen ; Martin Gausby ; ShopGun 3 | Nov 2017 4 | :toc: left 5 | :icons: font 6 | :source-highlighter: prettify 7 | :sw_core: ../apps/sw_core 8 | :sw_web: ../apps/sw_web 9 | :sw_test: ../test 10 | :project: Erlang GraphQL 11 | :relay: Relay Modern 12 | :shopgun: ShopGun 13 | :star-wars: Star Wars 14 | :cowboy-version: 2.2.x 15 | :port-number: 17290 16 | :imagesdir: ./images 17 | 18 | {project} Tutorial 19 | 20 | The guide here is a running example of an API implemented in Erlang 21 | through the ShopGun GraphQL engine. The API is a frontend to a 22 | database, containing information about the Star Wars films by George 23 | Lucas. The intent is to provide readers with enough information they 24 | can go build their own GraphQL servers in Erlang. 25 | 26 | We use the GraphQL system at https://shopgun.com as a data backend. 
We 27 | sponsor this tutorial as part of our Open Source efforts. We developed 28 | this GraphQL system to meet our demands as our system evolves. The 29 | world of tracking businesses and offers is a highly heterogeneous 30 | dataset, which requires the flexibility of something like GraphQL. 31 | 32 | Because GraphQL provides a lot of great tooling, we decided to move 33 | forward and implement a server backend for Erlang, which didn't exist 34 | at the time. 35 | 36 | At the same time, we recognize other people may be interested in the 37 | system and its development. Hence the decision was made to open source 38 | the GraphQL parts of the system. 39 | 40 | include::introduction.asciidoc[Introduction] 41 | include::why_graphql.asciidoc[Why GraphQL] 42 | include::system_tour.asciidoc[System Tour] 43 | include::getting_started.asciidoc[Getting Started] 44 | include::schema.asciidoc[Schema] 45 | include::scalar_resolution.asciidoc[Scalar Resolution] 46 | include::enum_resolution.asciidoc[Enum Resolution] 47 | include::type_resolution.asciidoc[Type Resolution] 48 | include::object_resolution.asciidoc[Object Resolution] 49 | include::transports.asciidoc[Transports] 50 | include::graphiql.asciidoc[GraphiQL] 51 | include::errors.asciidoc[Error Handling] 52 | include::relay_modern.asciidoc[Relay Modern] 53 | include::security.asciidoc[Security] 54 | 55 | [[annotations]] 56 | == Annotations 57 | 58 | TBD 59 | 60 | include::tricks.asciidoc[Tricks] 61 | 62 | [appendix] 63 | include::terms.asciidoc[Terms] 64 | 65 | [appendix] 66 | include::code.asciidoc[Code Overview] 67 | [appendix] 68 | [[changelog]] 69 | == Changelog 70 | 71 | Nov 6, 2017:: Document enumerated types. They have been inside the 72 | system in several different variants over the last months, but now 73 | we have a variant we are happy with, so document it and lock it down 74 | as the way to handle enumerated types in the system. Add `Episode` 75 | as a type which is enumerated in the system as an example. 
Also add 76 | lookups by episode to demonstrate the input/output paths for 77 | enumerated values. (Large parts of this work is due to a {shopgun} 78 | intern, Callum Roberts). 79 | 80 | Oct 18, 2017:: Document a trick: How one implements lazy evaluation in 81 | a GraphQL schema in the engine. Make sure that all code passes the 82 | dialyzer and enable dialyzer runs in Travis CI. 83 | 84 | June 22nd, 2017:: Merged a set of issues found by @benbro where 85 | wording made certain sections harder to understand. See issues #21, 86 | and #23-26. 87 | 88 | June 5th, 2017:: Merged a set of typo fixes to the documentation by 89 | @benbro. 90 | 91 | May 30th, 2017:: Documented a more complex mutation example, 92 | <>, which explains how to carry out more 93 | complex queries. Also added this as an example to the 94 | <>. 95 | 96 | May 29th, 2017:: Moved <> into terminology so it can be 97 | referenced from other places in the document easily. Described 98 | <>. Described <>. Made the 99 | first sweep on the documentation describing the notion of mutations. 100 | The <> now includes simple mutations as an example. 101 | 102 | May 24th, 2017:: Described Scalar Coercion in more detail in 103 | <>. Change the schema such that a *DateTime* 104 | scalar is used for the fields `created` and `edited` in all output 105 | objects. Then demonstrate how this is used to coerce values. 106 | 107 | May 22nd, 2017:: Documented how to resolve array objects in 108 | <>. 109 | 110 | -------------------------------------------------------------------------------- /doc/code.asciidoc: -------------------------------------------------------------------------------- 1 | == Code Overview 2 | 3 | This section describes all the files in the repository and what their 4 | purpose are: 5 | 6 | === Root 7 | 8 | .`rebar.config` 9 | 10 | The rebar3 configuration file. It contains information about the 11 | immediate system dependencies of the project. 
It also contains 12 | information for `relx` the release builder rebar3 uses. This is used 13 | to assemble a release by copying the Erlang runtime as well as the 14 | necessary support libraries into a release directory. This directory 15 | can then be archived via tar(1) or zip(1) and shipped for a production 16 | release. 17 | 18 | .`Makefile` 19 | 20 | Contains some convenience targets when building the software. In 21 | practice you have some support-calls that has to be made outside the 22 | build tool in many cases. This Makefile contains recipes for doing 23 | that, so you don't forget what is to be done. 24 | 25 | .`README.md` 26 | 27 | Instructions for the reader on GitHub. Also instructions on how to 28 | build the documentation and where to go next. 29 | 30 | .`rebar.lock` 31 | 32 | Dependency locking for reproducible builds. It makes sure you get 33 | versions of packages which are known to be working together and that 34 | upgrades of software is a deliberate action rather than an implicit 35 | one. 36 | 37 | .`config/vm.args` 38 | 39 | Release VM arguments. The release handler makes sure these become part 40 | of the system release so you can set parameters on the command line of 41 | the Erlang runtime. It is often used to fine-tune schedulers, memory 42 | allocation, or the upper bound on processes or ports. 43 | 44 | .`config/sys.config` 45 | 46 | The configuration file of the release. This allows us to override 47 | application-specific configuration knobs in the final release. Often, 48 | configuration can be handled by adding a call to 49 | `application:get_env/3` in the source code and then adding a default 50 | value to an applications `.app` file. Then it can be overridden in the 51 | `sys.config` file later, if a release needs a different setting. 52 | Another common use is to provide varying configuration for different 53 | environments. 54 | 55 | .`apps/*` 56 | 57 | The applications provided by this repository. 
See the following 58 | sections for their description. 59 | 60 | === Application `sw_core` 61 | 62 | .`priv/sw.schmea` 63 | 64 | The schema definition file which can be read by the {project} 65 | system. It defines the schema rules for the Star Wars API. 66 | 67 | .`src/sw_core.app.src` 68 | 69 | Application description file which `rebar3` compiles into 70 | `ebin/sw_core.app`. It contains a number of important sections for the 71 | project: 72 | 73 | * Dependencies--Listing what this application needs in order to 74 | function correctly. The release manager arranges the boot of the 75 | node such that every dependent application is started first. In 76 | short, it carries out a topological sorting of applications 77 | according to their dependencies and starts them in the right order. 78 | * Start module--Which module to invoke in order to start the 79 | application. 80 | * Environment--Application specific environmental defaults. In order 81 | to keep the `sys.config` file small sane defaults can be added here 82 | so they don't clutter the global configuration. 83 | 84 | .`src/sw_core_app.erl` 85 | 86 | The application behavior used to start the `sw_core` application. This 87 | file also contains the schema-loading code: when the system boots, we 88 | attempt to load and validate the schema. Any mistake will abort the 89 | boot process and print out a failure. 90 | 91 | .`src/sw_core_db.hrl` 92 | 93 | This header file contains the records we are using in our Mnesia 94 | database. One could have spread these over multiple files, but since 95 | the system is fairly small we use a single file for this. It is likely 96 | a larger system would split this into smaller sections. 97 | 98 | .`src/sw_core_db.erl` 99 | 100 | Wrapper around the database calls which are common in the system. Also 101 | contains the functions for creating the initial schema, which can be 102 | invoked without the `sw_core` application running. 
103 | 104 | .`src/sw_core_id.erl` 105 | 106 | Handling of _ID_ values in the Graph from the client. Provides 107 | encoding and decoding of identifier values so we know what object they 108 | refer to internally. 109 | 110 | .`src/sw_core_scalar.erl` 111 | 112 | Input and output coercion for scalar values. 113 | 114 | .`src/sw_core_type.erl` 115 | 116 | Describes how this GraphQL instance converts from abstract types such 117 | as Interfaces and Unions to concrete types. For instance how the 118 | system converts from the *Transport* interface to a *Starship* or 119 | *Vehicle*. 120 | 121 | .`src/sw_core_sup.erl` 122 | 123 | Top level supervisor referring to long-lived processes in this 124 | application. 125 | 126 | NOTE: Currently there are no such long-lived processes in the 127 | application. 128 | 129 | .`src/sw_core_film.erl` 130 | 131 | Code for resolving objects of type *Film*. 132 | 133 | .`src/sw_core_object.erl` 134 | 135 | Code for resolving generic objects not covered by other modules which 136 | specialize to a particular object type. Generic objects are 137 | represented as maps in the system and this module handles maps in 138 | general. This allows us to easily construct new types in the Graph 139 | without having to write special handlers for each. 140 | 141 | .`src/sw_core_paginate.erl` 142 | 143 | This file implements generic pagination code for the API. It is an 144 | implementation of {relay}'s conventions for paginations and cursors 145 | (see <>). 146 | 147 | .`src/sw_core_person.erl` 148 | 149 | Code for resolving objects of type *Person*. 150 | 151 | .`src/sw_core_planet.erl` 152 | 153 | Code for resolving objects of type *Planet*. 154 | 155 | .`src/sw_core_query.erl` 156 | 157 | Code for resolving objects of type *Query*. The query object is the main 158 | entry-point into the graph for data queries in which data is read out 159 | of the API. 
Notably it contains code for loading arbitrary objects if 160 | the client obtained a handle (id) on the object earlier. 161 | 162 | .`src/sw_core_species.erl` 163 | 164 | Code for resolving objects of type *Species*. 165 | 166 | .`src/sw_core_starship.erl` 167 | 168 | Code for resolving objects of type *Starship*. 169 | 170 | .`src/sw_core_vehicle.erl` 171 | 172 | Code for resolving objects of type *Vehicle*. 173 | 174 | === Application `sw_web` 175 | 176 | This application implements the web UI and the HTTP transport on top 177 | of the Core application. 178 | 179 | .`src/sw_web_app.erl` 180 | 181 | Application callback for the `sw_web` application. Also initializes 182 | the cowboy web server with its dispatch rules and the configuration of 183 | cowboy. 184 | 185 | .`src/sw_web_graphql_handler.erl` 186 | 187 | The main handler for GraphQL requests in the system. It provides 188 | transport between GraphQL and HTTP. 189 | 190 | .`src/sw_web_sup.erl` 191 | 192 | Main supervisor. Currently it has no children, but exists as a way to 193 | appease the application controller by giving the application a 194 | specific `pid()` it can use to know if the application is up and 195 | running. 196 | 197 | .`src/sw_web_response.erl` 198 | 199 | Wrapper around responses. It makes sure that an Erlang term is 200 | representable in JSON by converting something like a tuple into a binary 201 | value. This allows a JSON encoder to handle the Erlang term without 202 | problems. 203 | 204 | Another reason for doing this is that we eliminate a lot of 500 Status 205 | code responses from the system. 206 | 207 | 208 | -------------------------------------------------------------------------------- /doc/enum_resolution.asciidoc: -------------------------------------------------------------------------------- 1 | [[enum-resolution]] 2 | 3 | == Enum Resolution 4 | 5 | GraphQL defines a special kind of scalar type, namely the _enum_ type. 
6 | An enumerated type is one which can take a closed set of values, 7 | only. 8 | 9 | By convention, GraphQL systems tend to define these as all upper-case 10 | letters, but that is merely a convention to make them easy to 11 | distinguish from other things in a GraphQL query document. 12 | 13 | Erlang requires some thought about these. On one hand, we have an 14 | obvious representation internally in Erlang by using an `atom()` type, 15 | but these are not without their drawbacks: 16 | 17 | * The table of atoms is limited in Erlang. So if you can create them 18 | freely, you end up exhausting the atom table eventually. Thus, you 19 | cannot have an "`enemy`" create them. 20 | * In Erlang, atoms which begin with an upper-case letter have to be 21 | quoted. This is not always desirable. 22 | * Many transport formats, database backends and so on do not support 23 | atom types well. They don't have a representation of what Scheme 24 | calls a "symbol". So in that case they need handling. 25 | 26 | Because of this, the {project} defines an enum mapping construction exactly 27 | like the one we have for <>. This allows the 28 | programmer to translate enums as they enter or leave the system. This 29 | provides the ability to change the data format to something which has 30 | affordance in the rest of the system. In short, enums undergo coercion 31 | just like any other value. 32 | 33 | .Enum input 34 | 35 | In GraphQL, there are two paths for inputting an enum value: query 36 | document and query parameters. In the query document an enum is given 37 | as an unquoted value. It is _not_ legal to input an enum as a string 38 | in the query document (presumably to eliminate some errors up front). 39 | In contrast, in the parameter values, we are at the whim of its 40 | encoding. JSON is prevalent here and it doesn't have any encoding of 41 | enums. Hence, they are passed as strings here. 
42 | 43 | In order to simplify the input coercion code for these, we always pass 44 | them to coercers as binary data. This makes it such that developers 45 | only have to cater for one path here. 46 | 47 | === Defining enums 48 | 49 | You define enum values in the schema as mandated by the GraphQL 50 | specification. In the {star-wars} schema, we define the different film 51 | episodes like so 52 | 53 | [source,graphql] 54 | ---- 55 | include::{sw_core}/priv/sw.schema[tags=enumEpisode] 56 | ---- 57 | 58 | which defines a new enum type `Episode` with the possible values 59 | `PHANTOM, CLONES, ...`. 60 | 61 | === Coercion 62 | 63 | In order to handle these enum values internally inside a server, we 64 | need a way to translate these enum values. This is done by a coercer 65 | module, just like <>. First, we introduce a 66 | mapping rule 67 | 68 | ---- 69 | #{ ... 70 | enums => #{ 'Episode' => sw_core_enum }, 71 | ... } 72 | ---- 73 | 74 | In the schema mapping (see <> for the full 75 | explanation). This means that the type `Episode` is handled by the 76 | coercer module `sw_core_enum`. 77 | 78 | The module follows the same structure as in <>. You 79 | define two functions, `input/2` and `output/2` which handle the 80 | translation from external to internal representation and vice versa. 81 | 82 | [source,erlang] 83 | ---- 84 | include::{sw_core}/src/sw_core_enum.erl[tags=coreEnum] 85 | ---- 86 | <1> Conversion in the External -> Internal direction 87 | <2> Conversion in the Internal -> External direction 88 | 89 | In the example we turn binary data from the outside into appropriate 90 | atoms on the inside. This is useful in the case of our {star-wars} 91 | system because the Mnesia database is able to handle atoms directly. 92 | This code also protects our system against creating illegal atoms: 93 | partially because the coercer module cannot generate them, but also 94 | because the GraphQL type checker rejects values which are not valid 95 | enums in the schema. 
96 | 97 | In the output direction, our values are already the right ones, so we 98 | can just turn them into binaries. 99 | 100 | NOTE: The GraphQL system doesn't trust an output coercion function. It 101 | will check that the result indeed matches a valid enum value. If it 102 | doesn't the system will `null` the value and produce an error with an 103 | appropriately set `path` component. 104 | 105 | === Usage Example 106 | 107 | In GraphQL, we can run a query which asks for a film by its episode 108 | enum and then obtain some information on the film in question: 109 | 110 | [source,graphql] 111 | ---- 112 | include::{sw_test}/sw_SUITE_data/enum.query[] 113 | ---- 114 | 115 | Note how we use the value `JEDI` as an enum value for the episode in 116 | question. The GraphQL type checker, or your <> system will 117 | report errors if you misuse the enum value in this case. 118 | 119 | The output is as one expects from GraphQL: 120 | 121 | [source,graphql] 122 | ---- 123 | include::{sw_test}/sw_SUITE_data/enum.result[] 124 | ---- 125 | 126 | Here, the field `episode` returns the _string_ `"JEDI"` because the 127 | JSON output has no way of representing an enum value. This is the 128 | GraphQL default convention in this case. Likewise, enum input as a 129 | query parameter, e.g. as part of `query Q($episode : Episode) { ... 130 | }`, should set the `$episode` value to be a string: 131 | 132 | [source,json] 133 | ---- 134 | { "episode" : "EMPIRE", 135 | ... 136 | } 137 | ---- 138 | 139 | Which will be interpreted by the {project} as an enumerated value. 140 | 141 | -------------------------------------------------------------------------------- /doc/errors.asciidoc: -------------------------------------------------------------------------------- 1 | [[error-handling]] 2 | == Error Handling 3 | 4 | TBD 5 | 6 | Sections to be written: 7 | 8 | * Handling invalid terms around `jsx`. 
9 | 10 | -------------------------------------------------------------------------------- /doc/getting_started.asciidoc: -------------------------------------------------------------------------------- 1 | [[getting_started]] 2 | 3 | == Getting Started 4 | 5 | This tutorial takes you through the creation of a GraphQL server 6 | implementing the now ubiquitous _Star Wars_ API. This API was created 7 | a couple of years ago to showcase a REST interface describing good 8 | style for creation of APIs. The system revolves around a database 9 | containing information about the Star Wars universe: species, planets, 10 | starships, people and so on. 11 | 12 | GraphQL, when it was first released, ported the Star Wars system from 13 | REST to GraphQL in order to showcase how an API would look once 14 | translated. Because of its ubiquity, we have chosen to implement this 15 | schema in the tutorial you are now reading: 16 | 17 | * It is a small straightforward example. Yet it is large enough that 18 | it will cover most parts of GraphQL. 19 | * If the reader is already familiar with the system in another GraphQL 20 | implementation, it makes pickup of {project} faster. 21 | * We can use a full system as a driving example for this tutorial. 22 | * If {project} has a bug, it may be possible to showcase the bug 23 | through this repository. This makes it easier to work on since you 24 | have immediate common ground. 25 | 26 | The goal of the tutorial is to provide a developer with a working 27 | example from which you can start. Once completed, you can start adding 28 | your own types to the tutorial. And once they start working, you can 29 | "take over" the system and gradually remove the Star Wars parts until 30 | you have a fully working example. 31 | 32 | This implementation backs the system by means of a Mnesia database. 
33 | The choice is deliberate for a couple of reasons: 34 | 35 | * Mnesia is present in any Erlang system and thus it provides a simple 36 | way to get started and setup. 37 | * Mnesia is *not* a Graph Database. This makes it explicit your 38 | database can be anything. In fact, the "Graph" in GraphQL is 39 | misnomer since GraphQL works even when your data does not have a 40 | typical Graph-form. It is simply a nice query structure. 41 | 42 | === What we do not cover 43 | 44 | This tutorial doesn't cover everything in the repository: 45 | 46 | * The details of the `rebar3` integration and the `relx` release 47 | handling. 48 | * The tutorial only covers the parts of the code where there is 49 | something to learn. The areas of the code getting exposition in this 50 | document is due to the fact that they convey some kind of important 51 | information about the use of the GraphQL system for Erlang. Other 52 | parts, which are needed for completeness, but aren't as important 53 | are skipped. 54 | * There is no section on "`how do I set up an initial Erlang 55 | environment`" as it is expected to be done already. 56 | 57 | === Overview 58 | 59 | The purpose of a GraphQL server is to provide a contract between a 60 | client and a server. The contract ensures that the exchange of 61 | information follows a specific structure, and that queries and 62 | responses are in accordance with the contract specification. 63 | 64 | Additionally, the GraphQL servers contract defines what kind of 65 | queries are possible and what responses will look like. Every query 66 | and response is typed and a type checker ensures correctness of data. 67 | 68 | Finally, the contract is introspectable by the clients. This allows 69 | automatic deduction of queries and built-in documentation of the 70 | system interface. 71 | 72 | Thus, a GraphQL server is also a contract checker. 
The GraphQL system 73 | ensures that invalid queries are rejected, which makes it easier to 74 | implement the server side: you can assume queries are valid to a far 75 | greater extent than is typical in other systems such as typical REST 76 | interfaces. 77 | 78 | === Plan 79 | 80 | In order to get going, we need a world in which to operate. First, we 81 | must provide two schemas: one for the GraphQL system, and one for the 82 | Mnesia database. 83 | 84 | The GraphQL schema defines the client/server contract. It consists of 85 | several GraphQL entity kinds. For example: 86 | 87 | * Scalar types--Extensions on top of the default types. Often used 88 | for Dates, DateTimes, URIs, Colors, Currency, Locales and so on. 89 | * Enumerations--Values taken from a limited set. An example could be 90 | the enumeration of weekdays: "`MONDAY, TUESDAY, WEDNESDAY, ..., 91 | SUNDAY`". 92 | * Input Objects--Data flowing from the Client to the Server (Request). 93 | * Output Objects--Data flowing from the Server to the Client 94 | (Response). 95 | 96 | A somewhat peculiar choice by the GraphQL authors is that the world of 97 | Input and Output objects differ. In general, a Client has no way to 98 | "_PUT_" an input object back into the Graph as is the case in REST 99 | systems. From a type-level perspective, client requests and server 100 | responses have different _polarity_. 101 | 102 | //// 103 | TODO: Explain/Link to explanation of "polarity" or leave out. 104 | Wiki page on type theory doesn't even contain the word "polarity": 105 | https://en.wikipedia.org/wiki/Type_theory 106 | //// 107 | 108 | It may seem as if this is an irritating choice. You often have to 109 | specify the "`same`" object twice: once for input and once for output. 110 | However, as your GraphQL systems grows in size, it turns out this 111 | choice is the right one. 
You quickly run into situations where a 112 | client supplies a desired specific change where many of the fields on 113 | the output object doesn't make sense. By splitting the input and 114 | output world, it is easy to facilitate since the input objects can 115 | omit many fields that doesn't make sense. 116 | 117 | In a way, your GraphQL system is built such that changes to the data 118 | is done by executing "`transactions`" through a set of stored 119 | procedures. This can be seen as using the _"`PATCH`"_ method of RESTful 120 | interfaces and not having a definition of PUT. 121 | 122 | .CQRS 123 | 124 | GraphQL splits the schema into two worlds: _query_ and _mutation_. The 125 | difference from the server side is mostly non-existent: the GraphQL 126 | system is allowed to parallelize queries but not mutations. But from 127 | the perspective of the client, the starting points in the graph is 128 | either the _query_ or the _mutation_ object. 129 | 130 | GraphQL implements what is essentially <> by making a distinction 131 | between the notion of a _query_ and a _mutation_. Likewise, the server 132 | side makes this distinction. But on the server side it is merely 133 | implemented by having different starting objects in the graph 134 | execution. 135 | 136 | .Mnesia 137 | 138 | Our Star Wars schema uses the database *Mnesia* as a backend. It is 139 | important to stress that you often have a situation where your 140 | database backend doesn't map 1-1 onto your specified GraphQL schema. 141 | In larger systems, this is particularly important: the GraphQL schema 142 | is often served by multiple different backends, and those backends are 143 | not going to cleanly map onto the world we expose to the clients. So 144 | the GraphQL schema contract becomes a way to mediate between the 145 | different data stores. 
As an example, you may satisfy some parts of 146 | the GraphQL query from a dedicated search system--such as 147 | ElasticSearch--while others are served as rows from a traditional 148 | database, such as MySQL or Postgresql. You may even have a message 149 | queue broker or some other subsystem in which you have relevant data 150 | you want to query. Or perhaps, some queries are handled by 151 | micro-services in your architecture. 152 | 153 | Over the course of having built larger systems, we've experienced that 154 | mappings which tries to get isomorphism between the backend and the 155 | schema creates more problems than they solve. Small changes have 156 | consequence in all of the stack. Worse, you can't evolve part of the 157 | system without evolving other parts which impairs the flexibility of 158 | the system. 159 | 160 | Another problem is that you may end up with an impedance mismatch 161 | between the Objects and links of the GraphQL query and the way you 162 | store your data in the backend. If you force a 1-1 relationship 163 | between the two, you can get into trouble because your GraphQL schema 164 | can't naturally describe data. 165 | 166 | .Mnesia initialization 167 | 168 | A common problem people run into with Mnesia is how to "`get started`". 169 | What people often resort to are solutions where an initial database is 170 | created if it doesn't exist. These solutions are often brittle. 171 | 172 | Here, we pick another solution. A helper can create a database schema 173 | for us, with all the necessary tables. The real release _assumes_ the 174 | presence of an initial database and won't boot without one. This means 175 | the Erlang release is simpler. There is always some database from 176 | which it can boot and operate. That database might be the empty 177 | database since we are just starting out. But in particular, the 178 | release won't concern itself with creating an initial database. 
Rather 179 | it will assume one is already existing. 180 | 181 | The situation is not much different than using a traditional 182 | schema-oriented database. Usually, you have to create the database 183 | first, and then populate the schema with some initial data. It is just 184 | because of Rails/Django like systems in which databases are 185 | migrate-established, we've started using different models. 186 | 187 | == Mnesia 188 | === Setting up an initial Mnesia schema 189 | 190 | To get up and running, we begin by constructing a Mnesia schema we can 191 | start from. We do this by starting a shell on the Erlang node and then 192 | asking it to create the schema: 193 | 194 | [source] 195 | ---- 196 | $ git clean -dfxq # <1> 197 | $ make compile # <2> 198 | $ make shell-schema # <3> 199 | erl -pa `rebar3 path` -name sw@127.0.0.1 200 | Erlang/OTP 19 [erts-8.3] [source] [64-bit] [smp:8:8] [async-threads:10] [hipe] [kernel-poll:false] [dtrace] 201 | 202 | Eshell V8.3 (abort with ^G) 203 | 1> sw_core_db:create_schema(). % <4> 204 | ---- 205 | <1> Clean out the source code repository to make sure there are no lingering files: 206 | Caution when using this command as it could potentially delete files if used in the wrong directory. 207 | <2> Compile the code so we have compiled versions of modules we can load 208 | <3> Run the Erlang interpreter with an altered path for our newly compiled modules 209 | <4> Create the schema 210 | 211 | The call `create_schema()` runs the following schema creation code: 212 | 213 | [source,erlang] 214 | ---- 215 | include::{sw_core}/src/sw_core_db.erl[tags=createSchema] 216 | ---- 217 | 218 | Creating the schema amounts to running a set of commands from the 219 | Mnesia documentation. 
The helper function creates a 220 | large number of tables, so we are just going to show two here: 221 | 222 | [source,erlang] 223 | ---- 224 | include::{sw_core}/src/sw_core_db.erl[tags=createTables] 225 | ---- 226 | 227 | In Mnesia, tables are Erlang records. The `#planet{}` record needs 228 | definition and is in the header file `sw_core_db.hrl`. We simply list 229 | the entries which are defined in the SWAPI GraphQL schema so we can store 230 | the concept of a planet in the system: 231 | 232 | [source,erlang] 233 | ---- 234 | include::{sw_core}/src/sw_core_db.hrl[tags=planetRecord] 235 | ---- 236 | 237 | Every other table in the system is handled in the same manner, but is 238 | not given here for brevity. They follow the same style as the example above. 239 | 240 | === Populating the database 241 | 242 | Once we have introduced tables into the system, we can turn our 243 | attention to populating the database tables. For this, we use the 244 | SWAPI data set as the primary data source. This set has its fixtures 245 | stored as JSON documents. So we use `jsx` to decode those JSON 246 | documents and turn them into Mnesia records, which we then insert into 247 | the database. 248 | 249 | We can fairly easily write a transformer function which takes the JSON 250 | terms and turns them into appropriate Mnesia records. Planets live in a 251 | fixture file `planets.json`, which we can read and transform.
Some 252 | conversion is necessary on the way since the internal representation 253 | differs slightly from the representation in the fixture: 254 | 255 | [source,erlang] 256 | ---- 257 | include::{sw_core}/src/sw_core_db.erl[tags=json_to_planet] 258 | ---- 259 | 260 | Once we have this function down, we can utilize it to get a list of 261 | Mnesia records, which we can then insert into the database through a 262 | transaction: 263 | 264 | [source,erlang] 265 | ---- 266 | include::{sw_core}/src/sw_core_db.erl[tags=populate_planets] 267 | ---- 268 | 269 | The code to read in and populate the database is fairly 270 | straightforward. It is the last piece of the puzzle to inject relevant 271 | data into the Mnesia database: 272 | 273 | [source,erlang] 274 | ---- 275 | include::{sw_core}/src/sw_core_db.erl[tags=populatingTables] 276 | ---- 277 | 278 | 279 | This creates a fixture in the database such that when we boot the 280 | database, the planets, transports, people, ..., will be present in the 281 | Mnesia database when we boot the system. 282 | 283 | === Creating a FALLBACK for the database 284 | 285 | Once we have run the schema creation routine, a file called 286 | `FALLBACK.BUP` is created. We copy this to the `db/` directory in 287 | the repository 288 | [source,bash] 289 | ---- 290 | $ cp FALLBACK.BUP db/FALLBACK.BUP 291 | ---- 292 | which makes the empty schema available for the release manager of the 293 | Erlang system. When we cook a release, we will make sure to copy this 294 | initial schema into the correct Mnesia-directory of the release. 295 | Because the file is named `FALLBACK.BUP`, it is a fallback backup file. 296 | This will "`unpack`" itself to become a new empty database as if you 297 | had rolled in a backup on the first boot of the system. Thus we avoid 298 | our system having to deal with this problem at start up.
299 | 300 | NOTE: A real system will override the location of the Mnesia `dir` 301 | parameter and define a separate directory from which the Mnesia 302 | database will run. Initially, the operator will place the 303 | `FALLBACK.BUP` file in this directory to get going, but once we are 304 | established, and people start adding in data, we can't reset anything 305 | when deploying new versions. Hence the separate directory so we can 306 | upgrade the Erlang system without having to protect the database as 307 | much. 308 | 309 | We now have the ability to create new database tables easily and we 310 | have a Mnesia database for backing our data. This means we can start 311 | turning our attention to the GraphQL schema. 312 | -------------------------------------------------------------------------------- /doc/graphiql.asciidoc: -------------------------------------------------------------------------------- 1 | [[graphiql]] 2 | == GraphiQL 3 | 4 | The ubiquitous front-end for GraphQL servers is a system called 5 | GraphiQL, https://github.com/graphql/graphiql, which provides a nice 6 | user interface for a GraphQL server. We use this system as the 7 | front-end in the demo, whereas real applications will of course skip 8 | this front-end and just call directly to the GraphQL backend. 9 | 10 | Having a nice UI for a GraphQL server helps tremendously in 11 | development however. The UI uses the _introspection_ features of 12 | GraphQL which is built into {project}. It can thus request the 13 | schema types from the server and use that information to present a 14 | nice user interface. 15 | 16 | We have already provided cowboy dispatchers for GraphiQL (see 17 | <>). The only thing we have to do is to build a 18 | minified version of GraphiQL and place it in our `site/assets` folder 19 | inside our `priv` directory in the application `sw_web`. We also 20 | provide a default `index.html` to load when the root URL `/` is 21 | requested. 
22 | 23 | Since we bind the GraphQL server to {port-number} by default, you 24 | can access the GraphiQL system by starting the release: 25 | 26 | [source,bash] 27 | ---- 28 | $ make release 29 | $ _build/default/rel/sw/bin/sw console 30 | ---- 31 | 32 | And once the system is up and running, you can access it on 33 | http://localhost:{port-number}/. It will look like 34 | 35 | [#img-graphiql] 36 | .The Graphiql User Interface 37 | image::graphiql.png[GraphiQL] 38 | 39 | The GraphiQL User Interface provides a number of features for the 40 | developer: 41 | 42 | * The system provides documentation by clicking the `Docs` tab. The 43 | documentation is searchable and fully introspectable. 44 | * The system provides auto-completion and guidance when developing 45 | queries. It uses the introspection features to figure out what can 46 | be written. 47 | 48 | Let us run a simple example query in the interface. Since we have 49 | 50 | [source] 51 | ---- 52 | Eshell V8.3 (abort with ^G) 53 | (sw@127.0.0.1)1> base64:encode("Planet:3"). 54 | <<"UGxhbmV0OjM=">> 55 | ---- 56 | 57 | we can write a query for this particular planet: 58 | 59 | [source,graphql] 60 | ---- 61 | query PlanetQuery { 62 | node(id: "UGxhbmV0OjM=") { 63 | ... on planet { 64 | id 65 | name 66 | climate 67 | } 68 | } 69 | } 70 | ---- 71 | 72 | The GraphiQL interface is a nice development and debugging tool. We 73 | keep it available for production as well, behind a security gate, because 74 | it is nice that you can build a query on the fly if something is odd with a 75 | data set. 76 | 77 | Note that GraphiQL creates a very large URL containing the query 78 | itself. This is also very useful as you can send queries between 79 | people by pasting links. In a development setting, you can then talk 80 | about a particular query which doesn't operate as expected.
81 | 82 | 83 | 84 | -------------------------------------------------------------------------------- /doc/images/graphiql.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jlouis/graphql-erlang-tutorial/c2cebedf45e8b2bd3eb798977ebb72e13b9bfd05/doc/images/graphiql.png -------------------------------------------------------------------------------- /doc/introduction.asciidoc: -------------------------------------------------------------------------------- 1 | [[introduction]] 2 | 3 | == Introduction 4 | 5 | The {project} system allows you to implement GraphQL servers in 6 | Erlang. It works as a library which you can use on top of existing web 7 | servers such as Cowboy, Webmachine, Yaws and so on. 8 | 9 | As a developer, you work by providing a _schema_ which defines the 10 | query structure which your server provides. Next, you map your schema 11 | unto Erlang modules which then defines a binding of the two worlds. 12 | 13 | Clients execute _queries_ to the server according to the structure of 14 | the schema. The GraphQL system then figures out a _query plan_ for the 15 | query and executes the query. This in turn calls your bound modules 16 | and this allows you to process the query, load data, and so on. 17 | 18 | .Changes 19 | 20 | For a complete list of changes over time to this document, take a look 21 | at the <> appendix. 22 | 23 | === On this tutorial 24 | 25 | CAUTION: We are currently building the document and are still making 26 | changes to it. Things can still move around and change. If you see a 27 | "`TBD`" marker it means that section is "`To Be Done`" and will be 28 | written at a later point. In the same vein, the code base is being 29 | built up as well, so it may not be that everything is fully described 30 | yet. 31 | 32 | CAUTION: The current version of {project} returns some errors which 33 | are hard to parse and understand. 
It is our intention to make the 34 | error handling better and more clean in a later version. 35 | 36 | The tutorial you are now reading isn't really a tutorial per se where 37 | you type in stuff and see the output. There is a bit too much code for 38 | that kind of exposition. Rather, the tutorial describes a specific 39 | project implemented by means of the GraphQL system. You can use the 40 | ideas herein to build your own. 41 | 42 | There are examples of how things are built however, so you may be able 43 | to follow along and check out the construction of the system as a 44 | whole. Apart from being a small self-contained functional GraphQL 45 | project, it is also a small self-contained functional rebar3 project. 46 | So there's that. 47 | 48 | === Prerequisites 49 | 50 | Some Erlang knowledge is expected for reading this guide. General 51 | Erlang concept will not be explained, but assumed to be known. Some 52 | Mnesia knowledge will also help a bit in understanding what is going 53 | on, though if you know anything about databases in general, that is 54 | probably enough. Furthermore, some knowledge of the web in general is 55 | assumed. We don't cover the intricacies of HTTP 1.1 or HTTP/2 for 56 | instance. 57 | 58 | This tutorial uses a couple of dependencies: 59 | 60 | * Rebar3 is used to build the software 61 | * Cowboy 1.x is used as a web server for the project 62 | * GraphiQL is used as a web interface to the Graph System 63 | * Erlang/OTP version 19.3.3 was used in the creation of this tutorial 64 | 65 | === Supported Platforms 66 | 67 | The GraphQL system should run on any system which can run Erlang. The 68 | library does not use any special tooling, nor does it make any 69 | assumptions about the environment. If Erlang runs on your platform, 70 | chances are that GraphQL will too. 
71 | 72 | === Comments & Contact 73 | 74 | The official repository location is 75 | 76 | https://github.com/shopgun/graphql-erlang-tutorial 77 | 78 | If you have comments on the document or corrections, please open an 79 | https://github.com/shopgun/graphql-erlang-tutorial/issues[Issue] in 80 | the above repository on the thing that is missing. Also, feel free to 81 | provide pull requests against the code itself. 82 | 83 | Things we are particularly interested in: 84 | 85 | * Parts you don't understand. These often mean something isn't 86 | described well enough and needs improvement. 87 | * Code sequences that don't work for you. There is often some 88 | prerequisite the document should mention but doesn't. 89 | * Bad wording. Things should be clear and precise. If a particular 90 | sentence doesn't convey information clearly, we'd rather rewrite it 91 | than confuse the next reader. 92 | * Bugs in the code base. 93 | * Bad code structure. A problem with a tutorial repository is that it 94 | can "`infect`" code in the future. People copy from this repository, 95 | so if it contains bad style, then that bad style is copied into 96 | other repositories, infecting them with the same mistakes. 97 | * Stale documentation. Parts of the documentation which were relevant 98 | in the past but aren't anymore. For instance ID entries which don't 99 | work anymore. 100 | 101 | === License 102 | 103 | Copyright (C) 2017 {shopgun}. 104 | 105 | Licensed under the Apache License, Version 2.0 (the "License"); 106 | you may not use this file except in compliance with the License. 107 | You may obtain a copy of the License at 108 | 109 | http://www.apache.org/licenses/LICENSE-2.0 110 | 111 | Unless required by applicable law or agreed to in writing, software 112 | distributed under the License is distributed on an "AS IS" BASIS, 113 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
114 | See the License for the specific language governing permissions and 115 | limitations under the License. 116 | 117 | === Acknowledgments 118 | 119 | * Everyone involved in the Star Wars API. We use that data extensively. 120 | * The GraphQL people who did an excellent job at answering questions 121 | and provided us with a well-written specification. 122 | * Josh Price. The parser was derived from his initial work though it 123 | has been changed a lot since the initial commit. 124 | -------------------------------------------------------------------------------- /doc/relay_modern.asciidoc: -------------------------------------------------------------------------------- 1 | [[relay-modern]] 2 | == Relay Modern 3 | 4 | Facebook's use of GraphQL adds a layer on top through the {relay} 5 | framework. This layer adds some standards on top of the GraphQL system 6 | such that it has uniform ways of handling problems. The interaction 7 | with GraphQL is defined through specifications for each part. 8 | 9 | This chapter explains the concepts in relation to {project} and how 10 | one will achieve those standard pieces. 11 | 12 | === Node Interface 13 | 14 | {relay} defines an object identification specification together 15 | with an interface *Node* used to retrieve objects which are cached on 16 | the client side. The specification 17 | https://facebook.github.io/relay/graphql/objectidentification.htm 18 | defines the details of this. 19 | 20 | This tutorial already implements the Node interface. The section 21 | <> talks about the creation of globally unique 22 | identifiers, and the section on <> describes 23 | the concept of a *Node*. Finally, the section <> describes 24 | how nodes are loaded, generically, from the database backend. 25 | 26 | Taken together, this implements the object identification 27 | specification.footnote:[You must also do "`Node plurality`" correct, 28 | through this tutorial doesn't have a place where that gets used. 
Node 29 | plurality is the idea that if you load objects by a list, the return 30 | value retains the order in the list.] 31 | 32 | [[inputs-and-payloads]] 33 | === Inputs & Payloads 34 | 35 | TBD 36 | 37 | [[pagination]] 38 | === Pagination 39 | 40 | The {relay} pagination specification 41 | (https://facebook.github.io/relay/graphql/connections.htm) defines how 42 | pagination connections and cursors are supposed to work. We have a 43 | simple implementation of these ideas in Erlang in this tutorial. 44 | 45 | Real world systems will benefit from having a close linkage between a 46 | given data source and the pagination system. You can gain lots of 47 | efficiency if you request data after you know what window the client 48 | desired. The implementation is faithful to the specification and can 49 | be used as a start. 50 | 51 | Furthermore, different data sources tend to provide different ways to 52 | paginate. An RDBMS can use the OFFSET/LIMIT pairs, or a time-interval 53 | column. [Oracle and MS SQL Server use different notions, but can 54 | achieve the same thing] Some systems provide cursors which can be 55 | sent with a follow-up query. And so on. 56 | 57 | In the {relay} specification, the cursor is a _server side_ 58 | controlled piece of data. A client is not allowed to manipulate it. 59 | This allows the server to use the same pagination scheme for many 60 | different types of data stores. And this provides a large amount of 61 | flexibility. 62 | 63 | The pagination function is called as `select(Elements, Args)` where 64 | `Elements` is the set of edges we are paginating for, and `Args` is a 65 | map containing the fields `first`, `last`, `after`, and `before`. We 66 | expect the elements to be the full results of every eligible elements. 67 | This is possibly large and should be optimized in a real 68 | implementation. 
The body of the function looks like the following and 69 | follows the specification very closely: 70 | 71 | [source,erlang] 72 | ---- 73 | include::{sw_core}/src/sw_core_paginate.erl[tags=paginate] 74 | ---- 75 | <1> If the user does not supply either `first` nor `last`, then we set 76 | up a default which requests the first 5 edges. 77 | <2> We compute the total count of elements. 78 | <3> If `after` or `before` is given by the user, cut the window off 79 | after or before a cursor respectively. We also attach the position 80 | of each element by use of `lists:zip/2`. This is later used to 81 | render cursors correctly on the data. 82 | <4> Given the cut `Sliced` pick either the first or last `K` elements 83 | in that window. Then build the map `#{ node => Edge, cursor => 84 | Cursor }` via the function `format/1`. 85 | <5> Compute the `PageInfo` object. 86 | <6> Return the desired result as a map. 87 | 88 | .apply_cursors_to_edges/3 89 | 90 | This function cuts off a window with respect to either the `before` or 91 | the `after` cursor. We can handle this through pattern matching in 92 | Erlang: 93 | 94 | [source,erlang] 95 | ---- 96 | include::{sw_core}/src/sw_core_paginate.erl[tags=applyCursorsToEdges] 97 | ---- 98 | 99 | The function is pretty straightforward, since the cursor contains the 100 | position at which to cut. So we can simply split the element list at 101 | the right point and return it. 102 | 103 | .edges_to_return/3 104 | 105 | This function evaluates the `first` and `last` parameters and only 106 | returns the first/last `K` elements of the cut-off window. It follows 107 | a simple scheme: 108 | 109 | * If given first, we compare the size of the window to the desired 110 | number of elements. We then limit the window to the correct amount 111 | of elements. 112 | * If given last, we rewrite the task so it looks as if it were a 113 | first-type task. 
Then we execute this task--finally rewriting back 114 | to the original form 115 | 116 | [source,erlang] 117 | ---- 118 | include::{sw_core}/src/sw_core_paginate.erl[tags=edgesToReturn] 119 | ---- 120 | 121 | .PageInfo object 122 | 123 | To build up the PageInfo object, we use the following small helper 124 | functions which will determine if there are more elements after the 125 | window in either direction. They closely follow the specification: 126 | 127 | [source,erlang] 128 | ---- 129 | include::{sw_core}/src/sw_core_paginate.erl[tags=pageInfo] 130 | ---- 131 | 132 | .Packing cursors 133 | 134 | A cursor in this setup is the base64 encoding of the position: 135 | 136 | [source,erlang] 137 | ---- 138 | include::{sw_core}/src/sw_core_paginate.erl[tags=packCursor] 139 | ---- 140 | 141 | -------------------------------------------------------------------------------- /doc/scalar_resolution.asciidoc: -------------------------------------------------------------------------------- 1 | [[scalar-resolution]] 2 | 3 | == Scalar Resolution 4 | 5 | In a GraphQL specification, the structure of queries is defined by 6 | objects, interfaces and unions. But the "`ground`" types initially 7 | consist of a small set of standard types: 8 | 9 | * Int--Integer values 10 | * Float--Floating point values 11 | * String--Textual strings 12 | * Boolean--Boolean values 13 | * ID--Identifiers: values which are opaque to the client 14 | 15 | These ground types are called _Scalars_. The set of scalars is 16 | extensible with your own types. Some examples of typical scalars to 17 | extend a Schema by: 18 | 19 | * DateTime objects--with or without time zone information 20 | * Email addresses 21 | * URIs 22 | * Colors 23 | * Refined types--Floats in the range 0.0-1.0 for instance 24 | 25 | .Coercion 26 | 27 | Clients input scalar values as strings. Thus, the input string has to 28 | be _input coerced_ by the GraphQL system.
Vice versa, when a value is 29 | returned from the GraphQL backend, we must coerce so the client can 30 | handle it. This is called _output coercion_. 31 | 32 | .Validation, Canonicalization & Representation 33 | 34 | The advantage of coercing inputs from the client is that not only can 35 | we validate that the client sent something correct. We can also coerce 36 | different representations at the client side into a canonical one on 37 | the server side. This greatly simplifies the internals, as we can pick 38 | a different internal representation than one which the client operates 39 | with. 40 | 41 | In particular, we can chose an internal representation which is 42 | unrepresentable on the client side. That is, the client could be Java 43 | or JavaScript and neither of those languages has a construct for 44 | tuples which is nice to work with. At least not when we consider JSON 45 | as a transport for those languages. Yet, due to canonicalization, we 46 | may still use tuples and atoms internally in our Erlang code, as long 47 | as we make sure to output-coerce values such that they are 48 | representable by the transport and by the client. 49 | 50 | .Star Wars and Scalar values 51 | 52 | In the Star Wars schema, we have defined scalar *DateTime* which we 53 | use to coerce datetimes. If a client supplies a datetime, we will run 54 | that through the `iso8601` parsing library and obtain a 55 | `calendar:datetime()` tuple in Erlang. On output coercion, we will 56 | convert it back into ISO8601/RFC3339 representation. 
This demonstrates 57 | the common phenomenon in which an internal representation (tuples) is 58 | not realizable in the external representation--yet we can work around 59 | representation problems through coercion: 60 | 61 | .Schema Definition (`sw.schema`) 62 | [source,erlang] 63 | ---- 64 | include::{sw_core}/priv/sw.schema[tags=scalarDateTime] 65 | ---- 66 | 67 | We have arranged that data loaded into Mnesia undergoes iso8601 68 | conversion by default such that the internal data are 69 | `calendar:datetime()` objects in Mnesia. When output-coercing these 70 | objects, the GraphQL system realizes they are of type *DateTime*. This 71 | calls into the scalar conversion code we have mapped into the Star 72 | Wars schema: 73 | 74 | .Coercion code (`sw_core_scalar.erl`) 75 | [source,erlang] 76 | ---- 77 | include::{sw_core}/src/sw_core_scalar.erl[tags=scalarRepr] 78 | ---- 79 | 80 | A scalar coercion is a pair of two functions 81 | 82 | input/2:: Called whenever a scalar value needs to be coerced from 83 | _client_ to _server_. The valid responses are `{ok, Val} | {error, 84 | Reason}`. The converted response is substituted into the query so 85 | the rest of the code can work with converted values only. If `{error, 86 | Reason}` is returned, the query is failed. This can be used to 87 | white-list certain inputs only and serves as a correctness/security 88 | feature. 89 | 90 | output/2:: Called whenever a scalar value needs to be coerced from 91 | _server_ to _client_. The valid responses are `{ok, Val} | {error, 92 | Reason}`. Conversion makes sure that a client only sees coerced 93 | values. If an error is returned, the field is regarded as an error. 94 | It will be replaced by a `null` and <> will occur. 95 | 96 | In our scalar conversion pair, we handle *DateTime* by using the 97 | `iso8601` module to convert to/from ISO8601 representation. We also 98 | handle other manually defined scalar values by simply passing them 99 | through.
100 | 101 | NOTE: Built-in scalars such as Int, Float, String, Bool are handled by 102 | the system internally and do not currently undergo Scalar 103 | conversion. A special case exists for Int and Float. These are coerced 104 | between automatically if it is safe to do so.footnote:[Some people call 105 | this weak typing, but type promotion is probably a more apt term] 106 | 107 | === Example 108 | 109 | Consider the following GraphQL query: 110 | 111 | [source,graphql] 112 | ---- 113 | query SpeciesQ { 114 | node(id: "U3BlY2llczoxNQ==") { 115 | id 116 | ... on Species { 117 | name 118 | created 119 | } 120 | } 121 | } 122 | ---- 123 | 124 | which returns the following response: 125 | 126 | [source,json] 127 | ---- 128 | { 129 | "data": { 130 | "node": { 131 | "created": "2014-12-20T09:48:02Z", 132 | "id": "U3BlY2llczoxNQ==", 133 | "name": "Twi'lek" 134 | } 135 | } 136 | } 137 | ---- 138 | 139 | The `id` given here can be decoded to `"Species:15"`. We can use the 140 | Erlang shell to read in that species: 141 | 142 | [source] 143 | ---- 144 | (sw@127.0.0.1)1> rr(sw_core_db). % <1> 145 | [film,person,planet,sequences,species,starship,transport, 146 | vehicle] 147 | (sw@127.0.0.1)2> mnesia:dirty_read(species, 15). 148 | [#species{id = 15, 149 | edited = {{2014,12,20},{21,36,42}}, 150 | created = {{2014,12,20},{9,48,2}}, <2> 151 | classification = <<"mammals">>,name = <<"Twi'lek">>, 152 | designation = undefined, 153 | eye_colors = [<<"blue">>,<<"brown">>,<<"orange">>, 154 | <<"pink">>], 155 | ...}] 156 | ---- 157 | <1> Tell EShell where the records live so we can get better printing 158 | in the shell. 159 | <2> Note the representation in the backend. 160 | 161 | When the field `created` is requested, the system will return it as 162 | `{{2014,12,20},{9,48,2}}` and because it has type *DateTime* it will 163 | undergo output coercion to the ISO8601 representation. 
164 | 165 | How the field is requested and fetched out of the Mnesia database is 166 | described in the section <>. 167 | 168 | -------------------------------------------------------------------------------- /doc/security.asciidoc: -------------------------------------------------------------------------------- 1 | == Security 2 | 3 | This section describes different security aspects of GraphQL and how 4 | they pertain to the Erlang implementation of GraphQL. Any real world 5 | implementation of a system must combat enemies on the web. In general, 6 | you can expect requests to be evil. 7 | 8 | .Accidental maliciousness 9 | 10 | A rather common situation is when the "`malicious`" operation is 11 | accidental. Some user uses your system in a way you did not expect, 12 | and that then brings down your system. It isn't that they have crafted 13 | the query in order to bring down your system on purpose, it is simply 14 | that their use case makes your system go havoc. 15 | 16 | GraphQL servers must be built in a way such that every query has a 17 | limit and some kind of pagination. That way, you avoid that a single 18 | client can request all of your database and then go away before you 19 | can amass the response. By forcing clients to cooperate, you can get 20 | typical limitations such as request limits in play. Thus, any query 21 | that is possibly large, should have an upper bound on itself. You may 22 | also want to have a global upper bound in your query so requests for 23 | more than, say, 6000 objects will start returning errors if it is too 24 | large. 25 | 26 | === Limiting Clients--Stored Procedures 27 | 28 | .Work in Progress 29 | **** 30 | The following section is yet to be implemented in {project}, but 31 | we still mention it here because it is a common request, and it needs 32 | a common answer. 33 | 34 | When we get around to implement the feature, this section will also 35 | contain examples on how to use it. 
36 | **** 37 | 38 | GraphQL is a query language. If a client is able to run any query in 39 | the world, you may get into trouble with overload. Your system has to 40 | parse, type check & validate each request. And if the request is 41 | expensive, it puts unnecessary toll on your backend systems. To avoid 42 | this, production implementations support the ability to _prepare_ a 43 | query document containing all the queries a client wants to make. Once 44 | and for all the document is parsed, type checked, and validated. Then 45 | a reference is given back to the client. Clients who wish to run a 46 | query can then supply this reference and an `opName` inside the query 47 | document to run that query. 48 | 49 | This is much faster since the Server only has to execute the query and 50 | can avoid going through the validation steps again and again. While 51 | the {project} system is fast, about 4/5 of a query is 52 | pre-processing time before execution. In other words, you can speed up 53 | the GraphQL by quite a margin if you use stored procedures. 54 | 55 | In addition, you can also arrange that a client isn't able to 56 | construct new query documents without authorization. This means 57 | developers can deploy new query documents when they deploy new 58 | versions of an application, but a user of said application cannot 59 | produce new queries dynamically. 60 | 61 | In short: 62 | 63 | * Developers now have the full dynamic query language at their 64 | disposal 65 | * Users of the application can only proceed by calling prepared stored 66 | procedures. 67 | 68 | It is also possible to build hybrid systems. Let dynamic queries 69 | be limited in the backend to a few at a time. Thus, dynamic queries 70 | are far less likely to "`take out`" your system. 71 | 72 | If you give developers access through an API key, you can demand that 73 | they build query document should they want to run more than, say, 600 74 | queries per hour against your system. 
This is 10 queries per minute, 75 | which is usually fine for development--Once the system is done, you 76 | provide a query document for preparation, and then the prepared 77 | document is used. 78 | 79 | Another advantage of prepared documents is that the server side 80 | controls what gets executed. This allows you to target a problematic 81 | query at the server side and patch it, for instance by lowering the 82 | size of a pagination window, or making the query simpler by not 83 | providing certain parts. On the other hand, many of those problems 84 | should be fixed by altering the server to become more robust. 85 | 86 | [[authentication]] 87 | === Authentication 88 | 89 | TBD 90 | 91 | [[authorization]] 92 | === Authorization 93 | 94 | TBD 95 | 96 | 97 | -------------------------------------------------------------------------------- /doc/system_tour.asciidoc: -------------------------------------------------------------------------------- 1 | [[system-tour]] 2 | == System Tour 3 | 4 | Since a system as large as GraphQL can seem incomprehensible when you 5 | first use it, we will begin by providing a system tour explaining by 6 | example how the system works. In order to start the system for the 7 | first time, we must construct a _release_. 8 | 9 | .Releases 10 | **** 11 | Erlang uses a concept called _releases_ for real applications. A 12 | release consists of: 13 | 14 | * An erlang emulator for the virtual machine, BEAM as a binary in the 15 | operating system. 16 | * The set of applications which is configured to be inside the 17 | release. 18 | * Every dependent application needed by the release-configuration. 19 | * Support libraries for executing the emulator, binary NIF shared 20 | objects (NIF: Native implemented function), and so on. 21 | * Configuration for the system and runtime 22 | 23 | A release is a self-contained Erlang system living in its own 24 | directory structure. 
It can be copied to a target machine and started 25 | without having to install any support libraries, as long as the system 26 | architecture is the same. If you provide an appropriate BEAM emulator 27 | for the given architecture, the code will run with little to no 28 | modification as well since the Erlang byte code is highly portable. It 29 | even works across Erlang/OTP versions (subject to some restrictions). 30 | **** 31 | 32 | To make a release, run the following command: 33 | 34 | [source] 35 | ---- 36 | $ make release 37 | ---- 38 | 39 | This builds a release inside the `_build` directory and makes it 40 | available. In order to run the release, we can ask to run it with a 41 | console front-end, so we get a shell on the Erlang system: 42 | 43 | [source] 44 | ---- 45 | $ _build/default/rel/sw/bin/sw console 46 | ---- 47 | 48 | The system should boot and start running. A typical invocation looks 49 | like: 50 | 51 | [source] 52 | ---- 53 | Erlang/OTP 19 [erts-8.3] [source] [64-bit] [smp:8:8] [async-threads:30] [hipe] [kernel-poll:true] [dtrace] 54 | 55 | 15:33:05.705 [info] Application lager started on node 'sw@127.0.0.1' 56 | 15:33:05.705 [info] Application ranch started on node 'sw@127.0.0.1' 57 | 15:33:05.706 [info] Application graphql started on node 'sw@127.0.0.1' 58 | 15:33:05.706 [info] Application sw_core started on node 'sw@127.0.0.1' 59 | 15:33:05.706 [info] Application cowboy started on node 'sw@127.0.0.1' 60 | 15:33:05.706 [info] Starting HTTP listener on port 17290 61 | Eshell V8.3 (abort with ^G) 62 | (sw@127.0.0.1)1> 63 | ---- 64 | 65 | TIP: To exit an Erlang node like this, you can either Ctrl-C twice 66 | which stops the system abruptly. Or you can be nice to the system and 67 | ask it to close gracefully one application at a time by entering 68 | `q().` in the shell. 
69 | 70 | Once the Erlang emulator is running our `sw` release, we can point a 71 | browser to http://localhost:{port-number}/ and you should be greeted 72 | with the following screen: 73 | 74 | [#img-graphiql-system-tour] 75 | .The initial greeting screen for the GraphQL system 76 | image::graphiql.png[GraphiQL] 77 | 78 | === First query 79 | 80 | The first query we will run requests a given *Planet* from the system. 81 | This query follows a set of rules, the {relay} GraphQL conventions. 82 | These conventions are formed by Facebook as part of their Relay Modern 83 | system. It defines a common set of functionality on top of the GraphQL 84 | system which clients can rely on. 85 | 86 | In particular, our first query uses the rules of _Object 87 | Identification_ which is a way to load an object for which you already 88 | know its identity. A more complete exposition of the conventions are 89 | in the section <>, but here we skip the introduction for 90 | the sake of brevity: 91 | 92 | [source,graphql] 93 | ---- 94 | include::{sw_test}/sw_SUITE_data/first.query[] 95 | ---- 96 | <1> The *ID* entered here is opaque to the client, and we assume it 97 | was obtained in an earlier query. We will show typical ways to 98 | list things later in this section. 99 | <2> This notation, if you are only slightly familiar with GraphQL is 100 | called an _inline fragment_. The output of the `node` field is of 101 | type *Node* and here we restrict ourselves to the type *Planet*. 102 | <3> This requests the given fields in the particular planet we loaded. 103 | 104 | If you enter this in the GraphiQL left window and press the "`Run`" 105 | button, you should get the following response: 106 | 107 | [source,json] 108 | ---- 109 | include::{sw_test}/sw_SUITE_data/first.result[] 110 | ---- 111 | 112 | Note how the response reflects the structure of the query. 
This is a 113 | powerful feature of GraphQL since it allows you to build up queries 114 | client side and get deterministic results based off of your 115 | query-structure. 116 | 117 | === More advanced queries 118 | 119 | Let us look at a far more intricate query. In this query, we will also 120 | request a planet, but then we will ask "`what films does this planet 121 | appear in?`" and we will ask "`Who are the residents on the 122 | planet?`"--who has the planet as their homeworld? 123 | 124 | To do this, we use pagination. We ask for the first 2 films and the 125 | first 3 residents. We also ask for the relevant meta-data of the 126 | connections as we are here: 127 | 128 | [source,graphql] 129 | ---- 130 | include::{sw_test}/sw_SUITE_data/advanced.query[] 131 | ---- 132 | 133 | The `fragment` parts allow your queries to re-use different subsets 134 | of a larger query again and again. We use this here to show off that 135 | capability of GraphQL. The result follows the structure of the query: 136 | 137 | [source,json] 138 | ---- 139 | include::{sw_test}/sw_SUITE_data/advanced.result[] 140 | ---- 141 | 142 | === Simple Mutations 143 | 144 | Now, let us focus on altering the database through a _mutation_. In 145 | GraphQL, this is the way a client runs "`stored procedures`" on the 146 | Server side. The Star Wars example has tooling for factions in the 147 | Star Wars universe, but there are currently no factions defined. Let 148 | us amend that by introducing the rebels: 149 | 150 | [source,graphql] 151 | ---- 152 | include::{sw_test}/sw_SUITE_data/mutation.query[] 153 | ---- 154 | 155 | This query uses the GraphQL feature of input variables. In the UI, you 156 | can click and expand the section _Query Variables_ under the query 157 | pane.
This allows us to build a generic query like the one above and 158 | then repurpose it for creating any faction by providing the input 159 | variables for the query: 160 | 161 | [source,json] 162 | ---- 163 | include::{sw_test}/sw_SUITE_data/mutation.input[] 164 | ---- 165 | <1> This is chosen arbitrarily by the client and can be any string. 166 | Here we use a UUID. 167 | 168 | The server, when you execute this query, will respond with the 169 | creation of a new *Faction* and return its id, name and starships: 170 | 171 | [source,json] 172 | ---- 173 | include::{sw_test}/sw_SUITE_data/mutation.result[] 174 | ---- 175 | <1> The server reflects back the unique client-generated Id for 176 | correlation purposes. 177 | <2> The Id might be different depending on how many *Faction* objects 178 | you created. 179 | <3> We have yet to assign any starships to the faction, so the count 180 | is currently 0. 181 | 182 | We can now query this faction by its Id because it was added to the 183 | system: 184 | 185 | [source,graphql] 186 | ---- 187 | include::{sw_test}/sw_SUITE_data/faction.query[] 188 | ---- 189 | 190 | The system also persisted the newly created faction in its database so 191 | restarting the system keeps the added faction. 192 | 193 | CAUTION: Use `q()` in the shell to close the system gracefully. 194 | Otherwise you may be in a situation where a change isn't reflected on 195 | disk. The system will still load a consistent view of the database, 196 | but it will be from before the transaction was run. The Mnesia system 197 | used is usually quick at adding data to its WAL, but there is no 198 | guarantee. 199 | 200 | === More complex mutations 201 | 202 | With the rebels in the Graph, we can now create a new Starship, a 203 | B-Wing, which we will add to the graph. We will also attach it to the 204 | newly formed faction of Rebels. The mutation here exemplifies 205 | operations in which you bind data together in GraphQL.
Our mutation 206 | looks like: 207 | 208 | [source,graphql] 209 | ---- 210 | include::{sw_test}/sw_SUITE_data/bwing.query[] 211 | ---- 212 | <1> The values here are not for a "`real`" B-wing fighter, but are 213 | just made up somewhat arbitrarily. 214 | <2> The ID of the Faction. If you run this the ID may be a bit 215 | different so make sure you get the right ID here. 216 | 217 | We create a new Starship, a B-wing, in the Rebels faction. Note the 218 | resulting object, *IntroduceStarshipPayload*, contains the newly 219 | created *Starship* as well as the *Faction* which was input as part of 220 | the query. This is common in GraphQL: return every object of interest 221 | as part of a mutation. 222 | 223 | The result of the query is: 224 | 225 | [source,json] 226 | ---- 227 | include::{sw_test}/sw_SUITE_data/bwing.result[] 228 | ---- 229 | 230 | Note how the newly formed starship is now part of the Rebel faction's 231 | starships, and that the total count of starships in the Faction is now 232 | 1. The `created` field on the Starship is automatically generated by 233 | the system as part of introducing it. 234 | 235 | NOTE: Not all the fields on the newly formed starship are "valid" 236 | insofar as we decided to reduce the interface here in order to make it 237 | easier to understand in the tutorial. A more complete solution would 238 | force us to input every field on the *Starship* we just introduced and 239 | also use sensible defaults if not given. 240 | 241 | === This tutorial 242 | 243 | This tutorial will tell you how to create your own system which can 244 | satisfy queries as complex and complicated as the examples we just 245 | provided. It will explain the different parts of the GraphQL system 246 | and how you achieve the above.
247 | -------------------------------------------------------------------------------- /doc/terms.asciidoc: -------------------------------------------------------------------------------- 1 | == Terminology 2 | 3 | This section defines terminology used in GraphQL that doesn't fit in 4 | the rest of the document. It is used as a reference to describe 5 | certain behaviors in a GraphQL implementation. 6 | 7 | [[null-propagation]] 8 | === Null Propagation 9 | 10 | In GraphQL, fields are _nullable_ by default. A generic field `f : T` can 11 | either take on the value of `T` or the value `null` if the rendering 12 | of the field fails for some reason. 13 | 14 | In contrast, a field can be non-nullable, `f : T!` in which case the 15 | field is not allowed to take on the value of `null`. 16 | 17 | If you try to complete a non-null field in an object, and `null` is 18 | returned, or an error occur, then the whole object becomes `null`. 19 | This notion _propagates_ until all of the query becomes `null` or we 20 | reach a nullable field, whichever comes first. 21 | 22 | TIP: If you are accustomed to writing statically typed programs, you 23 | may desire to mark as many fields as possible non-null. But the 24 | sacrifice made by doing so is that you can't return partial results. 25 | GraphQL servers are often distributed in nature and handle distributed 26 | backends. Thus, it is fairly often the case that some part of the 27 | system is down, while other parts of the system is up. By having some 28 | fields nullable, you allow the system to null out failing subsystems, 29 | while still providing answers for the parts of the query that can be 30 | fulfilled currently. Too many non-nullable types will make your system 31 | brittle as every document is an all-or-nothing approach. 32 | 33 | [[HATEOAS]] 34 | === Hypertext as the engine of application state 35 | 36 | Hypertext embedded in responses can have users "`click around`" in 37 | your API. 
If you embed the possible operations as links in responses, 38 | a client can use returned data to learn what it can do with the data. 39 | Roy T. Fielding's PhD thesis covers this in great detail. 40 | 41 | GraphQL doesn't implement HATEOAS, but it gets fairly close to the 42 | idea. Given that a GraphQL query can be introspected, you can 43 | gradually learn about the interface as a client and utilize that 44 | interface. In practice however, it is common to lock down the possible 45 | queries for a given client, in order to protect the system and get 46 | security. 47 | 48 | [[context]] 49 | === Context 50 | 51 | The context map contains a number of base fields before the developers 52 | extend the context with their own fields. This section describes 53 | those fields and their purpose: 54 | 55 | * TBD 56 | 57 | [[cqrs]] 58 | === CQRS 59 | 60 | CQRS stands for Command Query Responsibility Segregation. The idea stems 61 | from the observation that querying data often has a different feel 62 | than commanding the system to do changes. So rather than trying to 63 | solve both in one interface, you slice the system such that you have a 64 | query-part which pertains only to querying data, and a command-part 65 | which pertains to mutating data. 66 | 67 | Often, the command section becomes a system based on an append-only 68 | event log in which command processors read events and make changes to 69 | the system. These changes are then made persistent and ready for 70 | query. 71 | 72 | The Query system is built with dynamic arbitrary queries in mind and 73 | is focused on this only. 74 | 75 | The splitting often helps larger systems as they tend to have large 76 | differences in the Query part and the Command part. 77 | 78 | [[cursor]] 79 | 80 | === Cursor 81 | 82 | We often use the term "`cursor`" in this tutorial.
Imagine that a 83 | GraphQL is rendered by moving a cursor around in the data set and then 84 | rendering each part of the query as the cursor moves around. As the 85 | cursor traverses (recursively) deeper into the data set, more parts of 86 | the query may be rendered on demand. 87 | 88 | In practice, the cursor can be executed in parallel. If you submit a 89 | `query` you must assume that rendering will happen in parallel when 90 | possible. In contrast, a `mutation` will always process the query 91 | serially one element at a time. This is to make sure changes for a 92 | given query are not interfering with each other. 93 | 94 | NOTE: A typical system has on the order of 100:1 queries to 95 | mutations. It is very likely your data is queried far more often than 96 | they are mutated. Thus, if you look to optimize, optimize for queries 97 | first, unless you happen to know you have a large amount of mutations. 98 | 99 | 100 | -------------------------------------------------------------------------------- /doc/tricks.asciidoc: -------------------------------------------------------------------------------- 1 | [[tricks]] 2 | 3 | == Tricks 4 | 5 | [[object-representation]] 6 | === Object Representation 7 | 8 | A rather useful representation of objects is to have some additional 9 | metadata on your object for use by the GraphQL system in addition to 10 | the base data fields which the client can request. 11 | 12 | .Maps 13 | 14 | If your object representation is a `map()`, you can add special fields 15 | into the map which is used by the GraphQL system. You can add those 16 | fields as you load the object from the backend database, in order to 17 | make it easier to work with later. In Erlang systems, due to 18 | immutability, a pointer to some static data is essentially free, as 19 | long as terms share the same base value. So don't be afraid to add 20 | some metadata on your object. 
21 | 22 | A common convention is to use a special atom such as 23 | `'$tag'`.footnote:[In Erlang/OTP 20.0 and onward, atoms can be 24 | arbitrary Unicode data, so you can pick a special symbol to mark 25 | special atoms] You can then add data under that key in the map which 26 | is useful to the GraphQL backend only. 27 | 28 | In addition, our convention is that fields which _must_ be derived 29 | begin with an underscore (e.g., `_images`). This makes it clear to the 30 | reader that the data is not isosmurfically mappable into the Graph but 31 | requires some kind of transformation. 32 | 33 | .Wrappers 34 | 35 | Rather than represent an object as a record such as `#starship{}` you 36 | represent the data as a wrapped term: `{#starship{} = Ship, MetaData}` 37 | and then you write your execution function such that it operates on 38 | the wrapped term rather than the raw `Ship`. This has the advantage of 39 | keeping the data separate from the raw plain data object. The 40 | sacrifice, though, is you have to do more work in your object 41 | resolution code. 42 | 43 | [[non-isomorphism]] 44 | === Avoid Isomorphic representations 45 | 46 | A common want when designing API systems is to avoid the need for 47 | continual translation of backend data to the GraphQL schema. A 48 | common solution to this problem is to make the database schema 1-1 49 | with the GraphQL schema, often called an isomorphic 50 | representation.footnote:[Isomorphic stems from ancient greek and means 51 | "`equal shape`"] However, our experience is that such a 1-1 mapping is 52 | detrimental to the development of the system. It is common the GraphQL 53 | schema and the underlying data evolve at different paces and that new 54 | data sources are added as you go along. 55 | 56 | Thus, a piece of advice is to know when to break from the 1-1 mapping 57 | and build your own translation layer in order to handle the gradual 58 | evolution of the database schema and the GraphQL contract. 
In general, 59 | you shouldn't be afraid of breaking the isomorphic representation if 60 | that turns out to help you define your system in a better way. On the 61 | flip side, inventing new terminology and names shouldn't in general be 62 | done for the sake of doing so. The advantage of having an isomorphism 63 | between the contract and the database is that you don't have to 64 | explain to people what the mapping means. 65 | 66 | * Look out for the situation where a simple change in the contract 67 | starts an avalanche of changes all throughout your stack. This tends 68 | to mean you have built a system where each layer transforms the 69 | data. Keep transformers down to as few layers as possible and let 70 | the end-points in the data passing handle the transformations. 71 | * Large systems constantly change. Have some place in the code where 72 | you can insert a temporary stub or plug while you change other parts 73 | of the system. It is not generally possible to switch a system in 74 | one go as soon as it becomes large. By having a stub/plug you can 75 | gradually change the large system rather than having to change 76 | everything at once. 77 | 78 | [[middleware-stacks]] 79 | === Middleware stacks 80 | 81 | In many larger HTTP systems, it is common to have a "`middleware 82 | stack`". In a middleware stack, there is a section of code which is 83 | run for every request to the system. It is often used for a number of 84 | different cases: 85 | 86 | * Authentication of clients if they provide authentication tokens. 87 | * Proxy setup by coercing the underlying IP addresses from proxy-added 88 | headers. 89 | * IP Blacklist handling 90 | * Request limitation systems 91 | * Metric gathering for requests 92 | * CORS--Cross-Origin Resource Sharing 93 | 94 | In a GraphQL, these concerns tend to split into two groups: 95 | 96 | Contextual middleware:: This is run before the GraphQL execution 97 | begins. 
98 | Mutation middleware:: This is run as part of a mutation. 99 | 100 | In {project} we decided to create a system in which middleware 101 | handling is put into the hands of the programmer outside of the 102 | GraphQL system. There is no way to "`inject`" middlewares in the 103 | system. Rather we handle the stack by providing functionality which 104 | allows the programmer to write their own equivalent. 105 | 106 | The reason is we recognize how these stacks tend to be application 107 | specific and also tend to be changing a lot. So by keeping them 108 | outside the {project} itself we avoid having to cater for them all the 109 | time. 110 | 111 | ==== The Context 112 | 113 | Many parts of the execution depends on the transport in which we run. 114 | An HTTP transport will have different handling than a raw TCP socket 115 | on which we exchange protocol buffers, a Kafka topic, or a RabbitMQ 116 | broker for instance. 117 | 118 | Things such as CORS and authentication is usually handled on the 119 | transport. You then setup of extra parameters for the context and 120 | start GraphQL execution with that added context. We tend to use a 121 | field `#{ auth_context => Auth } = Ctx` inside the context for 122 | authentication. Now, when your GraphQL query executes, it has access 123 | to authentication data and can act accordingly. 124 | 125 | ==== The Mutations 126 | 127 | For mutations, we like to write the execution function such that it 128 | handles all fields at the top level. This allows you to use typical 129 | Erlang function calls to build up a stack. At the very bottom of the 130 | stack you dispatch on the `Field` in `execute/4` <> 131 | to handle each mutation. The function calls allows you to manipulate 132 | the `Ctx` and `Args` with further information as you process the 133 | mutation. 134 | 135 | --- 136 | 137 | We've found these two tricks to be adequate for all of our handling. 138 | Note that the context is immutable in a query. 
We don't in general 139 | allow the context to be manipulated by the queries. If you need to 140 | "`pass down`" extra data, embed it into the *Obj* which is returned 141 | such that when the cursor moves down to the object, you can inspect it 142 | for data. 143 | 144 | Say you want to protect certain fields on an object based on auth. 145 | When you load the object, you can mask out fields the auth-context 146 | doesn't have access to by replacing them with an atom such as 147 | `access_denied`. Then you write a function: 148 | 149 | [source,erlang] 150 | ---- 151 | access(Obj, Field) -> 152 | case maps:get(Field, Obj, not_found) of 153 | not_found -> {ok, null}; % Or appropriate answer 154 | access_denied -> {error, access_denied}; % or perhaps {ok, null} 155 | Val -> {ok, Val} 156 | end. 157 | ---- 158 | 159 | More advanced solutions are possible and are covered in the sections 160 | of <> and <>. 161 | 162 | === Data Loader 163 | 164 | TBD 165 | 166 | === Fragments 167 | 168 | TBD 169 | 170 | [[lazy-evaluation]] 171 | === Lazy Evaluation 172 | 173 | If you have data where computation is circular, you will have to make 174 | sure you don't build an infinite loop in the data. This system has 175 | support for lazy evaluation, but you will have to write it yourself 176 | and handle it on your side. GraphQL provides the facilities, but not 177 | the solution here. 178 | 179 | When you return an object to GraphQL, you can return any data. Further 180 | recursion into the query will then call execution functions on the 181 | underlying data. If you return an object such as 182 | 183 | [source, erlang] 184 | ---- 185 | {ok, #{ <<"fieldName">> => {'$lazy', fun() -> Expr end}, ...}} 186 | ---- 187 | 188 | you delay the computation of `Expr` because it is wrapped in a
Now, when you actually hit the field, in another execute 190 | function, you can handle the lazy node by evaluating it when the field 191 | is hit: 192 | 193 | [source, erlang] 194 | ---- 195 | execute(Ctx, #{ <<"fieldName">> := Field }, <<"fieldName">>, Args) -> 196 | {'$lazy', Thunk} = Field, 197 | Thunk(); 198 | ... 199 | ---- 200 | 201 | This ensures you only force/unroll the computation if the field is 202 | actually invoked, and you obtain lazy evaluation over the Graph. 203 | 204 | .Uses 205 | 206 | The method is useful in the case where your data is naturally cyclic, 207 | but where any query has a limited depth. By delaying computation, you 208 | will only force the computation the necessary amount of times, rather 209 | than eagerly entering an infinite loop. 210 | 211 | Another common use case is when some parts of your computation is 212 | known when you build the initial object, but the computation of the 213 | content is expensive. By delaying the computation itself inside a 214 | thunk, you only compute that part if it turns out to be necessary. 215 | 216 | -------------------------------------------------------------------------------- /doc/type_resolution.asciidoc: -------------------------------------------------------------------------------- 1 | [[type-resolution]] 2 | == Type Resolution 3 | 4 | In GraphQL, certain types are _abstract_. These are interfaces and 5 | unions. When the GraphQL system encounters an abstract type, it must 6 | have a way to _resolve_ those abstract types into concrete (output) 7 | types. This is handled by the type resolution mapping. 8 | 9 | The executor of GraphQL queries uses the type resolver when it wants 10 | to make an abstract object concrete. The executor can then continue 11 | working with the concretized object and thus determine if fragments 12 | should expand and so on. 
13 | 14 | A type resolver takes an Erlang term as input and provides a resolved 15 | type as output: 16 | 17 | [source,erlang] 18 | ---- 19 | -spec execute(Term) -> {ok, Type} | {error, Reason} 20 | when 21 | Term :: term(), 22 | Type :: atom(), 23 | Reason :: term(). 24 | ---- 25 | 26 | The `Term` is often some piece of data loaded from a database, but it 27 | can be any representation of data in the Erlang system. The purpose of 28 | the `execute/1` function is to analyze that data and return what type 29 | it belongs to (as an atom). In our case, we can assume resolution 30 | works on Mnesia objects. Hence, by matching on Mnesia objects, we can 31 | resolve the type in the Graph of those objects (file: `sw_core_type.erl`): 32 | 33 | [source,erlang] 34 | ---- 35 | include::{sw_core}/src/sw_core_type.erl[tags=resolveType] 36 | ---- 37 | 38 | TIP: In larger implementations, you often use multiple type resolvers 39 | and use the mapping rules to handle different abstract types via 40 | different resolvers. Also, type resolution is likely to forward 41 | decisions to other modules and merely act as a dispatch layer for the 42 | real code. The current implementation allows for a great deal of 43 | flexibility for this reason. 44 | 45 | Use pattern matching in the `execute/1` function to vary what kinds of 46 | data you can process. Do not be afraid to wrap your objects into other 47 | objects if that makes it easier to process. Since you can handle any 48 | Erlang term, you can often wrap your objects in a map of metadata and 49 | use the metadata for figuring out the type of the object. See 50 | <> for a discussion of commonly used variants. 
51 | 52 | 53 | -------------------------------------------------------------------------------- /doc/why_graphql.asciidoc: -------------------------------------------------------------------------------- 1 | [[why_graphql]] 2 | 3 | == Why GraphQL 4 | 5 | A worthy question to ask is "`Why GraphQL?`" 6 | 7 | GraphQL is a natural extension of what we are already doing on the 8 | Web. As our systems grow, we start realizing our systems become 9 | gradually more heterogeneous in the sense that data becomes more 10 | complex and data gets more variance. 11 | 12 | In addition--since we usually have a single API serving multiple 13 | different clients, written in different languages for different 14 | platforms--we need to be flexible in query support. Clients are likely 15 | to evolve dynamically, non-linearly, and at different paces. Thus, the 16 | backend must support evolution while retaining backwards 17 | compatibility. Also, we must have a contract or protocol between the 18 | clients and the server that is standardized. Otherwise, we end up 19 | inventing our own system again and again, and this is a strenuous 20 | affair which has little to no reuse between systems. 21 | 22 | The defining characteristic of GraphQL is that the system is 23 | client-focused and client-centric. Data is driven by the client of the 24 | system, and not by the server. The consequence is that the 25 | delivery-time for features tend to be shorter. As soon as the product 26 | knows what change to make, it can often be handled with less 27 | server-side interaction than normally. Especially for the case where 28 | you are recombining existing data into a new view. 29 | 30 | RESTful APIs have served us well for a long time. And they are likely 31 | to continue serving as well in a large number of situations. However, 32 | if you have a system requiring more complex interaction, chances are 33 | you are better off by taking the plunge and switching your system to 34 | GraphQL. 
35 | 36 | .Efficiency 37 | 38 | RESTful APIs recently got a powerful improvement in HTTP/2 which 39 | allows RESTful APIs to pipeline far better than what they did earlier. 40 | However, you still pay the round trip time between data dependencies 41 | in an HTTP/2 setting: You need the listing of keys before you can 42 | start requesting the data objects on those keys. In contrast, GraphQL 43 | queries tend to be a single round-trip only. A full declarative query 44 | is formulated and executed, without the need of any intermediate 45 | query. This means faster response times. Even in the case where a 46 | single query becomes slower since there is no need for followup 47 | queries. 48 | 49 | A major (subtle) insight is that in a GraphQL server, you don't have 50 | to hand-code the looping constructs which tend to be present in a lot 51 | of RESTful APIs. To avoid the round-trip described in the preceding 52 | paragraph, you often resort to a solution where a specialized 53 | optimized query is constructed and added to the system. This 54 | specialized endpoint is then looping over the data in one go so you 55 | avoid having to do multiple round-trips. 56 | 57 | In a GraphQL system, that looping is handled once-and-for-all by the 58 | GraphQL engine. You are only implementing callbacks that run as part 59 | of the loop. A lot of tedious code is then handled by GraphQL and we 60 | avoid having to code this again and again for each RESTful web service 61 | we write. 62 | 63 | .Porting 64 | 65 | You can often move your system onto GraphQL a bit at a time. You don't 66 | have to port every endpoint in the beginning. Often, people add some 67 | kind of field, `previousId` say, which is used as an identifier in the 68 | old system. Then you can gradually take over data from an old system 69 | and port it on top of GraphQL. Once the ball is rolling, it is likely 70 | that more and more clients want to use it, as it is an easier interface 71 | for them to use.
72 | -------------------------------------------------------------------------------- /fixtures/films.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "starships": [ 5 | 2, 6 | 3, 7 | 5, 8 | 9, 9 | 10, 10 | 11, 11 | 12, 12 | 13 13 | ], 14 | "edited": "2014-12-20T19:49:45.256Z", 15 | "vehicles": [ 16 | 4, 17 | 6, 18 | 7, 19 | 8 20 | ], 21 | "planets": [ 22 | 1, 23 | 2, 24 | 3 25 | ], 26 | "producer": "Gary Kurtz, Rick McCallum", 27 | "title": "A New Hope", 28 | "created": "2014-12-10T14:23:31.880Z", 29 | "episode_id": 4, 30 | "director": "George Lucas", 31 | "release_date": "1977-05-25", 32 | "opening_crawl": "It is a period of civil war.\r\nRebel spaceships, striking\r\nfrom a hidden base, have won\r\ntheir first victory against\r\nthe evil Galactic Empire.\r\n\r\nDuring the battle, Rebel\r\nspies managed to steal secret\r\nplans to the Empire's\r\nultimate weapon, the DEATH\r\nSTAR, an armored space\r\nstation with enough power\r\nto destroy an entire planet.\r\n\r\nPursued by the Empire's\r\nsinister agents, Princess\r\nLeia races home aboard her\r\nstarship, custodian of the\r\nstolen plans that can save her\r\npeople and restore\r\nfreedom to the galaxy....", 33 | "characters": [ 34 | 1, 35 | 2, 36 | 3, 37 | 4, 38 | 5, 39 | 6, 40 | 7, 41 | 8, 42 | 9, 43 | 10, 44 | 12, 45 | 13, 46 | 14, 47 | 15, 48 | 16, 49 | 18, 50 | 19, 51 | 81 52 | ], 53 | "species": [ 54 | 1, 55 | 2, 56 | 3, 57 | 4, 58 | 5 59 | ] 60 | }, 61 | "model": "resources.film", 62 | "pk": 1 63 | }, 64 | { 65 | "fields": { 66 | "starships": [ 67 | 3, 68 | 10, 69 | 11, 70 | 12, 71 | 15, 72 | 17, 73 | 21, 74 | 22, 75 | 23 76 | ], 77 | "edited": "2014-12-15T13:07:53.386Z", 78 | "vehicles": [ 79 | 8, 80 | 14, 81 | 16, 82 | 18, 83 | 19, 84 | 20 85 | ], 86 | "planets": [ 87 | 4, 88 | 5, 89 | 6, 90 | 27 91 | ], 92 | "producer": "Gary Kurtz, Rick McCallum", 93 | "title": "The Empire Strikes Back", 94 | "created": "2014-12-12T11:26:24.656Z", 
95 | "episode_id": 5, 96 | "director": "Irvin Kershner", 97 | "release_date": "1980-05-17", 98 | "opening_crawl": "It is a dark time for the\r\nRebellion. Although the Death\r\nStar has been destroyed,\r\nImperial troops have driven the\r\nRebel forces from their hidden\r\nbase and pursued them across\r\nthe galaxy.\r\n\r\nEvading the dreaded Imperial\r\nStarfleet, a group of freedom\r\nfighters led by Luke Skywalker\r\nhas established a new secret\r\nbase on the remote ice world\r\nof Hoth.\r\n\r\nThe evil lord Darth Vader,\r\nobsessed with finding young\r\nSkywalker, has dispatched\r\nthousands of remote probes into\r\nthe far reaches of space....", 99 | "characters": [ 100 | 1, 101 | 2, 102 | 3, 103 | 4, 104 | 5, 105 | 10, 106 | 13, 107 | 14, 108 | 18, 109 | 20, 110 | 21, 111 | 22, 112 | 23, 113 | 24, 114 | 25, 115 | 26 116 | ], 117 | "species": [ 118 | 1, 119 | 2, 120 | 3, 121 | 6, 122 | 7 123 | ] 124 | }, 125 | "model": "resources.film", 126 | "pk": 2 127 | }, 128 | { 129 | "fields": { 130 | "starships": [ 131 | 2, 132 | 3, 133 | 10, 134 | 11, 135 | 12, 136 | 15, 137 | 17, 138 | 22, 139 | 23, 140 | 27, 141 | 28, 142 | 29 143 | ], 144 | "edited": "2014-12-20T09:48:37.462Z", 145 | "vehicles": [ 146 | 8, 147 | 16, 148 | 18, 149 | 19, 150 | 24, 151 | 25, 152 | 26, 153 | 30 154 | ], 155 | "planets": [ 156 | 1, 157 | 5, 158 | 7, 159 | 8, 160 | 9 161 | ], 162 | "producer": "Howard G. 
Kazanjian, George Lucas, Rick McCallum", 163 | "title": "Return of the Jedi", 164 | "created": "2014-12-18T10:39:33.255Z", 165 | "episode_id": 6, 166 | "director": "Richard Marquand", 167 | "release_date": "1983-05-25", 168 | "opening_crawl": "Luke Skywalker has returned to\r\nhis home planet of Tatooine in\r\nan attempt to rescue his\r\nfriend Han Solo from the\r\nclutches of the vile gangster\r\nJabba the Hutt.\r\n\r\nLittle does Luke know that the\r\nGALACTIC EMPIRE has secretly\r\nbegun construction on a new\r\narmored space station even\r\nmore powerful than the first\r\ndreaded Death Star.\r\n\r\nWhen completed, this ultimate\r\nweapon will spell certain doom\r\nfor the small band of rebels\r\nstruggling to restore freedom\r\nto the galaxy...", 169 | "characters": [ 170 | 1, 171 | 2, 172 | 3, 173 | 4, 174 | 5, 175 | 10, 176 | 13, 177 | 14, 178 | 16, 179 | 18, 180 | 20, 181 | 21, 182 | 22, 183 | 25, 184 | 27, 185 | 28, 186 | 29, 187 | 30, 188 | 31, 189 | 45 190 | ], 191 | "species": [ 192 | 1, 193 | 2, 194 | 3, 195 | 5, 196 | 6, 197 | 8, 198 | 9, 199 | 10, 200 | 15 201 | ] 202 | }, 203 | "model": "resources.film", 204 | "pk": 3 205 | }, 206 | { 207 | "fields": { 208 | "starships": [ 209 | 31, 210 | 32, 211 | 39, 212 | 40, 213 | 41 214 | ], 215 | "edited": "2014-12-20T10:54:07.216Z", 216 | "vehicles": [ 217 | 33, 218 | 34, 219 | 35, 220 | 36, 221 | 37, 222 | 38, 223 | 42 224 | ], 225 | "planets": [ 226 | 1, 227 | 8, 228 | 9 229 | ], 230 | "producer": "Rick McCallum", 231 | "title": "The Phantom Menace", 232 | "created": "2014-12-19T16:52:55.740Z", 233 | "episode_id": 1, 234 | "director": "George Lucas", 235 | "release_date": "1999-05-19", 236 | "opening_crawl": "Turmoil has engulfed the\r\nGalactic Republic. 
The taxation\r\nof trade routes to outlying star\r\nsystems is in dispute.\r\n\r\nHoping to resolve the matter\r\nwith a blockade of deadly\r\nbattleships, the greedy Trade\r\nFederation has stopped all\r\nshipping to the small planet\r\nof Naboo.\r\n\r\nWhile the Congress of the\r\nRepublic endlessly debates\r\nthis alarming chain of events,\r\nthe Supreme Chancellor has\r\nsecretly dispatched two Jedi\r\nKnights, the guardians of\r\npeace and justice in the\r\ngalaxy, to settle the conflict....", 237 | "characters": [ 238 | 2, 239 | 3, 240 | 10, 241 | 11, 242 | 16, 243 | 20, 244 | 21, 245 | 32, 246 | 33, 247 | 34, 248 | 35, 249 | 36, 250 | 37, 251 | 38, 252 | 39, 253 | 40, 254 | 41, 255 | 42, 256 | 43, 257 | 44, 258 | 46, 259 | 47, 260 | 48, 261 | 49, 262 | 50, 263 | 51, 264 | 52, 265 | 53, 266 | 54, 267 | 55, 268 | 56, 269 | 57, 270 | 58, 271 | 59 272 | ], 273 | "species": [ 274 | 1, 275 | 2, 276 | 6, 277 | 11, 278 | 12, 279 | 13, 280 | 14, 281 | 15, 282 | 16, 283 | 17, 284 | 18, 285 | 19, 286 | 20, 287 | 21, 288 | 22, 289 | 23, 290 | 24, 291 | 25, 292 | 26, 293 | 27 294 | ] 295 | }, 296 | "model": "resources.film", 297 | "pk": 4 298 | }, 299 | { 300 | "fields": { 301 | "starships": [ 302 | 21, 303 | 32, 304 | 39, 305 | 43, 306 | 47, 307 | 48, 308 | 49, 309 | 52, 310 | 58 311 | ], 312 | "edited": "2014-12-20T20:18:48.516Z", 313 | "vehicles": [ 314 | 4, 315 | 44, 316 | 45, 317 | 46, 318 | 50, 319 | 51, 320 | 53, 321 | 54, 322 | 55, 323 | 56, 324 | 57 325 | ], 326 | "planets": [ 327 | 1, 328 | 8, 329 | 9, 330 | 10, 331 | 11 332 | ], 333 | "producer": "Rick McCallum", 334 | "title": "Attack of the Clones", 335 | "created": "2014-12-20T10:57:57.886Z", 336 | "episode_id": 2, 337 | "director": "George Lucas", 338 | "release_date": "2002-05-16", 339 | "opening_crawl": "There is unrest in the Galactic\r\nSenate. 
Several thousand solar\r\nsystems have declared their\r\nintentions to leave the Republic.\r\n\r\nThis separatist movement,\r\nunder the leadership of the\r\nmysterious Count Dooku, has\r\nmade it difficult for the limited\r\nnumber of Jedi Knights to maintain \r\npeace and order in the galaxy.\r\n\r\nSenator Amidala, the former\r\nQueen of Naboo, is returning\r\nto the Galactic Senate to vote\r\non the critical issue of creating\r\nan ARMY OF THE REPUBLIC\r\nto assist the overwhelmed\r\nJedi....", 340 | "characters": [ 341 | 2, 342 | 3, 343 | 6, 344 | 7, 345 | 10, 346 | 11, 347 | 20, 348 | 21, 349 | 22, 350 | 33, 351 | 35, 352 | 36, 353 | 40, 354 | 43, 355 | 46, 356 | 51, 357 | 52, 358 | 53, 359 | 58, 360 | 59, 361 | 60, 362 | 61, 363 | 62, 364 | 63, 365 | 64, 366 | 65, 367 | 66, 368 | 67, 369 | 68, 370 | 69, 371 | 70, 372 | 71, 373 | 72, 374 | 73, 375 | 74, 376 | 75, 377 | 76, 378 | 77, 379 | 78, 380 | 82 381 | ], 382 | "species": [ 383 | 1, 384 | 2, 385 | 6, 386 | 12, 387 | 13, 388 | 15, 389 | 28, 390 | 29, 391 | 30, 392 | 31, 393 | 32, 394 | 33, 395 | 34, 396 | 35 397 | ] 398 | }, 399 | "model": "resources.film", 400 | "pk": 5 401 | }, 402 | { 403 | "fields": { 404 | "starships": [ 405 | 2, 406 | 32, 407 | 48, 408 | 59, 409 | 61, 410 | 63, 411 | 64, 412 | 65, 413 | 66, 414 | 68, 415 | 74, 416 | 75 417 | ], 418 | "edited": "2014-12-20T20:47:52.073Z", 419 | "vehicles": [ 420 | 33, 421 | 50, 422 | 53, 423 | 56, 424 | 60, 425 | 62, 426 | 67, 427 | 69, 428 | 70, 429 | 71, 430 | 72, 431 | 73, 432 | 76 433 | ], 434 | "planets": [ 435 | 1, 436 | 2, 437 | 5, 438 | 8, 439 | 9, 440 | 12, 441 | 13, 442 | 14, 443 | 15, 444 | 16, 445 | 17, 446 | 18, 447 | 19 448 | ], 449 | "producer": "Rick McCallum", 450 | "title": "Revenge of the Sith", 451 | "created": "2014-12-20T18:49:38.403Z", 452 | "episode_id": 3, 453 | "director": "George Lucas", 454 | "release_date": "2005-05-19", 455 | "opening_crawl": "War! 
The Republic is crumbling\r\nunder attacks by the ruthless\r\nSith Lord, Count Dooku.\r\nThere are heroes on both sides.\r\nEvil is everywhere.\r\n\r\nIn a stunning move, the\r\nfiendish droid leader, General\r\nGrievous, has swept into the\r\nRepublic capital and kidnapped\r\nChancellor Palpatine, leader of\r\nthe Galactic Senate.\r\n\r\nAs the Separatist Droid Army\r\nattempts to flee the besieged\r\ncapital with their valuable\r\nhostage, two Jedi Knights lead a\r\ndesperate mission to rescue the\r\ncaptive Chancellor....", 456 | "characters": [ 457 | 1, 458 | 2, 459 | 3, 460 | 4, 461 | 5, 462 | 6, 463 | 7, 464 | 10, 465 | 11, 466 | 12, 467 | 13, 468 | 20, 469 | 21, 470 | 33, 471 | 35, 472 | 46, 473 | 51, 474 | 52, 475 | 53, 476 | 54, 477 | 55, 478 | 56, 479 | 58, 480 | 63, 481 | 64, 482 | 67, 483 | 68, 484 | 75, 485 | 78, 486 | 79, 487 | 80, 488 | 81, 489 | 82, 490 | 83 491 | ], 492 | "species": [ 493 | 1, 494 | 2, 495 | 3, 496 | 6, 497 | 15, 498 | 19, 499 | 20, 500 | 23, 501 | 24, 502 | 25, 503 | 26, 504 | 27, 505 | 28, 506 | 29, 507 | 30, 508 | 33, 509 | 34, 510 | 35, 511 | 36, 512 | 37 513 | ] 514 | }, 515 | "model": "resources.film", 516 | "pk": 6 517 | } 518 | ] 519 | -------------------------------------------------------------------------------- /fixtures/starships.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "pilots": [], 5 | "MGLT": "60", 6 | "starship_class": "corvette", 7 | "hyperdrive_rating": "2.0" 8 | }, 9 | "model": "resources.starship", 10 | "pk": 2 11 | }, 12 | { 13 | "fields": { 14 | "pilots": [], 15 | "MGLT": "60", 16 | "starship_class": "Star Destroyer", 17 | "hyperdrive_rating": "2.0" 18 | }, 19 | "model": "resources.starship", 20 | "pk": 3 21 | }, 22 | { 23 | "fields": { 24 | "pilots": [], 25 | "MGLT": "70", 26 | "starship_class": "landing craft", 27 | "hyperdrive_rating": "1.0" 28 | }, 29 | "model": "resources.starship", 30 | "pk": 5 31 | }, 32 | { 33 | "fields": { 
34 | "pilots": [], 35 | "MGLT": "10", 36 | "starship_class": "Deep Space Mobile Battlestation", 37 | "hyperdrive_rating": "4.0" 38 | }, 39 | "model": "resources.starship", 40 | "pk": 9 41 | }, 42 | { 43 | "fields": { 44 | "pilots": [ 45 | 13, 46 | 14, 47 | 25, 48 | 31 49 | ], 50 | "MGLT": "75", 51 | "starship_class": "Light freighter", 52 | "hyperdrive_rating": "0.5" 53 | }, 54 | "model": "resources.starship", 55 | "pk": 10 56 | }, 57 | { 58 | "fields": { 59 | "pilots": [], 60 | "MGLT": "80", 61 | "starship_class": "assault starfighter", 62 | "hyperdrive_rating": "1.0" 63 | }, 64 | "model": "resources.starship", 65 | "pk": 11 66 | }, 67 | { 68 | "fields": { 69 | "pilots": [ 70 | 1, 71 | 9, 72 | 18, 73 | 19 74 | ], 75 | "MGLT": "100", 76 | "starship_class": "Starfighter", 77 | "hyperdrive_rating": "1.0" 78 | }, 79 | "model": "resources.starship", 80 | "pk": 12 81 | }, 82 | { 83 | "fields": { 84 | "pilots": [ 85 | 4 86 | ], 87 | "MGLT": "105", 88 | "starship_class": "Starfighter", 89 | "hyperdrive_rating": "1.0" 90 | }, 91 | "model": "resources.starship", 92 | "pk": 13 93 | }, 94 | { 95 | "fields": { 96 | "pilots": [], 97 | "MGLT": "40", 98 | "starship_class": "Star dreadnought", 99 | "hyperdrive_rating": "2.0" 100 | }, 101 | "model": "resources.starship", 102 | "pk": 15 103 | }, 104 | { 105 | "fields": { 106 | "pilots": [], 107 | "MGLT": "20", 108 | "starship_class": "Medium transport", 109 | "hyperdrive_rating": "4.0" 110 | }, 111 | "model": "resources.starship", 112 | "pk": 17 113 | }, 114 | { 115 | "fields": { 116 | "pilots": [ 117 | 22 118 | ], 119 | "MGLT": "70", 120 | "starship_class": "Patrol craft", 121 | "hyperdrive_rating": "3.0" 122 | }, 123 | "model": "resources.starship", 124 | "pk": 21 125 | }, 126 | { 127 | "fields": { 128 | "pilots": [ 129 | 1, 130 | 13, 131 | 14 132 | ], 133 | "MGLT": "50", 134 | "starship_class": "Armed government transport", 135 | "hyperdrive_rating": "1.0" 136 | }, 137 | "model": "resources.starship", 138 | "pk": 22 139 | }, 140 
| { 141 | "fields": { 142 | "pilots": [], 143 | "MGLT": "40", 144 | "starship_class": "Escort ship", 145 | "hyperdrive_rating": "2.0" 146 | }, 147 | "model": "resources.starship", 148 | "pk": 23 149 | }, 150 | { 151 | "fields": { 152 | "pilots": [], 153 | "MGLT": "60", 154 | "starship_class": "Star Cruiser", 155 | "hyperdrive_rating": "1.0" 156 | }, 157 | "model": "resources.starship", 158 | "pk": 27 159 | }, 160 | { 161 | "fields": { 162 | "pilots": [ 163 | 29 164 | ], 165 | "MGLT": "120", 166 | "starship_class": "Starfighter", 167 | "hyperdrive_rating": "1.0" 168 | }, 169 | "model": "resources.starship", 170 | "pk": 28 171 | }, 172 | { 173 | "fields": { 174 | "pilots": [], 175 | "MGLT": "91", 176 | "starship_class": "Assault Starfighter", 177 | "hyperdrive_rating": "2.0" 178 | }, 179 | "model": "resources.starship", 180 | "pk": 29 181 | }, 182 | { 183 | "fields": { 184 | "pilots": [], 185 | "MGLT": "unknown", 186 | "starship_class": "Space cruiser", 187 | "hyperdrive_rating": "2.0" 188 | }, 189 | "model": "resources.starship", 190 | "pk": 31 191 | }, 192 | { 193 | "fields": { 194 | "pilots": [], 195 | "MGLT": "unknown", 196 | "starship_class": "Droid control ship", 197 | "hyperdrive_rating": "2.0" 198 | }, 199 | "model": "resources.starship", 200 | "pk": 32 201 | }, 202 | { 203 | "fields": { 204 | "pilots": [ 205 | 11, 206 | 35, 207 | 60 208 | ], 209 | "MGLT": "unknown", 210 | "starship_class": "Starfighter", 211 | "hyperdrive_rating": "1.0" 212 | }, 213 | "model": "resources.starship", 214 | "pk": 39 215 | }, 216 | { 217 | "fields": { 218 | "pilots": [ 219 | 39 220 | ], 221 | "MGLT": "unknown", 222 | "starship_class": "yacht", 223 | "hyperdrive_rating": "1.8" 224 | }, 225 | "model": "resources.starship", 226 | "pk": 40 227 | }, 228 | { 229 | "fields": { 230 | "pilots": [ 231 | 44 232 | ], 233 | "MGLT": "unknown", 234 | "starship_class": "Space Transport", 235 | "hyperdrive_rating": "1.5" 236 | }, 237 | "model": "resources.starship", 238 | "pk": 41 239 | }, 240 | 
{ 241 | "fields": { 242 | "pilots": [], 243 | "MGLT": "unknown", 244 | "starship_class": "Diplomatic barge", 245 | "hyperdrive_rating": "0.7" 246 | }, 247 | "model": "resources.starship", 248 | "pk": 43 249 | }, 250 | { 251 | "fields": { 252 | "pilots": [], 253 | "MGLT": "unknown", 254 | "starship_class": "freighter", 255 | "hyperdrive_rating": "unknown" 256 | }, 257 | "model": "resources.starship", 258 | "pk": 47 259 | }, 260 | { 261 | "fields": { 262 | "pilots": [ 263 | 10, 264 | 58 265 | ], 266 | "MGLT": "unknown", 267 | "starship_class": "Starfighter", 268 | "hyperdrive_rating": "1.0" 269 | }, 270 | "model": "resources.starship", 271 | "pk": 48 272 | }, 273 | { 274 | "fields": { 275 | "pilots": [ 276 | 35 277 | ], 278 | "MGLT": "unknown", 279 | "starship_class": "yacht", 280 | "hyperdrive_rating": "0.9" 281 | }, 282 | "model": "resources.starship", 283 | "pk": 49 284 | }, 285 | { 286 | "fields": { 287 | "pilots": [], 288 | "MGLT": "unknown", 289 | "starship_class": "assault ship", 290 | "hyperdrive_rating": "0.6" 291 | }, 292 | "model": "resources.starship", 293 | "pk": 52 294 | }, 295 | { 296 | "fields": { 297 | "pilots": [], 298 | "MGLT": "unknown", 299 | "starship_class": "yacht", 300 | "hyperdrive_rating": "1.5" 301 | }, 302 | "model": "resources.starship", 303 | "pk": 58 304 | }, 305 | { 306 | "fields": { 307 | "pilots": [ 308 | 10, 309 | 11 310 | ], 311 | "MGLT": "unknown", 312 | "starship_class": "capital ship", 313 | "hyperdrive_rating": "1.5" 314 | }, 315 | "model": "resources.starship", 316 | "pk": 59 317 | }, 318 | { 319 | "fields": { 320 | "pilots": [], 321 | "MGLT": "unknown", 322 | "starship_class": "transport", 323 | "hyperdrive_rating": "1.0" 324 | }, 325 | "model": "resources.starship", 326 | "pk": 61 327 | }, 328 | { 329 | "fields": { 330 | "pilots": [], 331 | "MGLT": "unknown", 332 | "starship_class": "star destroyer", 333 | "hyperdrive_rating": "1.0" 334 | }, 335 | "model": "resources.starship", 336 | "pk": 63 337 | }, 338 | { 339 | 
"fields": { 340 | "pilots": [ 341 | 10, 342 | 35 343 | ], 344 | "MGLT": "unknown", 345 | "starship_class": "yacht", 346 | "hyperdrive_rating": "0.5" 347 | }, 348 | "model": "resources.starship", 349 | "pk": 64 350 | }, 351 | { 352 | "fields": { 353 | "pilots": [ 354 | 10, 355 | 11 356 | ], 357 | "MGLT": "unknown", 358 | "starship_class": "starfighter", 359 | "hyperdrive_rating": "1.0" 360 | }, 361 | "model": "resources.starship", 362 | "pk": 65 363 | }, 364 | { 365 | "fields": { 366 | "pilots": [], 367 | "MGLT": "100", 368 | "starship_class": "starfighter", 369 | "hyperdrive_rating": "1.0" 370 | }, 371 | "model": "resources.starship", 372 | "pk": 66 373 | }, 374 | { 375 | "fields": { 376 | "pilots": [], 377 | "MGLT": "unknown", 378 | "starship_class": "cruiser", 379 | "hyperdrive_rating": "1.0" 380 | }, 381 | "model": "resources.starship", 382 | "pk": 68 383 | }, 384 | { 385 | "fields": { 386 | "pilots": [ 387 | 10, 388 | 79 389 | ], 390 | "MGLT": "unknown", 391 | "starship_class": "starfighter", 392 | "hyperdrive_rating": "6" 393 | }, 394 | "model": "resources.starship", 395 | "pk": 74 396 | }, 397 | { 398 | "fields": { 399 | "pilots": [], 400 | "MGLT": "unknown", 401 | "starship_class": "starfighter", 402 | "hyperdrive_rating": "1.0" 403 | }, 404 | "model": "resources.starship", 405 | "pk": 75 406 | } 407 | ] 408 | -------------------------------------------------------------------------------- /fixtures/vehicles.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "vehicle_class": "wheeled", 5 | "pilots": [] 6 | }, 7 | "model": "resources.vehicle", 8 | "pk": 4 9 | }, 10 | { 11 | "fields": { 12 | "vehicle_class": "repulsorcraft", 13 | "pilots": [] 14 | }, 15 | "model": "resources.vehicle", 16 | "pk": 6 17 | }, 18 | { 19 | "fields": { 20 | "vehicle_class": "repulsorcraft", 21 | "pilots": [] 22 | }, 23 | "model": "resources.vehicle", 24 | "pk": 7 25 | }, 26 | { 27 | "fields": { 28 | "vehicle_class": 
"starfighter", 29 | "pilots": [] 30 | }, 31 | "model": "resources.vehicle", 32 | "pk": 8 33 | }, 34 | { 35 | "fields": { 36 | "vehicle_class": "airspeeder", 37 | "pilots": [ 38 | 1, 39 | 18 40 | ] 41 | }, 42 | "model": "resources.vehicle", 43 | "pk": 14 44 | }, 45 | { 46 | "fields": { 47 | "vehicle_class": "space/planetary bomber", 48 | "pilots": [] 49 | }, 50 | "model": "resources.vehicle", 51 | "pk": 16 52 | }, 53 | { 54 | "fields": { 55 | "vehicle_class": "assault walker", 56 | "pilots": [] 57 | }, 58 | "model": "resources.vehicle", 59 | "pk": 18 60 | }, 61 | { 62 | "fields": { 63 | "vehicle_class": "walker", 64 | "pilots": [ 65 | 13 66 | ] 67 | }, 68 | "model": "resources.vehicle", 69 | "pk": 19 70 | }, 71 | { 72 | "fields": { 73 | "vehicle_class": "repulsorcraft", 74 | "pilots": [] 75 | }, 76 | "model": "resources.vehicle", 77 | "pk": 20 78 | }, 79 | { 80 | "fields": { 81 | "vehicle_class": "sail barge", 82 | "pilots": [] 83 | }, 84 | "model": "resources.vehicle", 85 | "pk": 24 86 | }, 87 | { 88 | "fields": { 89 | "vehicle_class": "repulsorcraft cargo skiff", 90 | "pilots": [] 91 | }, 92 | "model": "resources.vehicle", 93 | "pk": 25 94 | }, 95 | { 96 | "fields": { 97 | "vehicle_class": "starfighter", 98 | "pilots": [] 99 | }, 100 | "model": "resources.vehicle", 101 | "pk": 26 102 | }, 103 | { 104 | "fields": { 105 | "vehicle_class": "speeder", 106 | "pilots": [ 107 | 1, 108 | 5 109 | ] 110 | }, 111 | "model": "resources.vehicle", 112 | "pk": 30 113 | }, 114 | { 115 | "fields": { 116 | "vehicle_class": "starfighter", 117 | "pilots": [] 118 | }, 119 | "model": "resources.vehicle", 120 | "pk": 33 121 | }, 122 | { 123 | "fields": { 124 | "vehicle_class": "repulsorcraft", 125 | "pilots": [] 126 | }, 127 | "model": "resources.vehicle", 128 | "pk": 34 129 | }, 130 | { 131 | "fields": { 132 | "vehicle_class": "repulsorcraft", 133 | "pilots": [] 134 | }, 135 | "model": "resources.vehicle", 136 | "pk": 35 137 | }, 138 | { 139 | "fields": { 140 | "vehicle_class": 
"repulsorcraft", 141 | "pilots": [] 142 | }, 143 | "model": "resources.vehicle", 144 | "pk": 36 145 | }, 146 | { 147 | "fields": { 148 | "vehicle_class": "landing craft", 149 | "pilots": [] 150 | }, 151 | "model": "resources.vehicle", 152 | "pk": 37 153 | }, 154 | { 155 | "fields": { 156 | "vehicle_class": "submarine", 157 | "pilots": [ 158 | 10, 159 | 32 160 | ] 161 | }, 162 | "model": "resources.vehicle", 163 | "pk": 38 164 | }, 165 | { 166 | "fields": { 167 | "vehicle_class": "speeder", 168 | "pilots": [ 169 | 44 170 | ] 171 | }, 172 | "model": "resources.vehicle", 173 | "pk": 42 174 | }, 175 | { 176 | "fields": { 177 | "vehicle_class": "repulsorcraft", 178 | "pilots": [ 179 | 11 180 | ] 181 | }, 182 | "model": "resources.vehicle", 183 | "pk": 44 184 | }, 185 | { 186 | "fields": { 187 | "vehicle_class": "airspeeder", 188 | "pilots": [ 189 | 70 190 | ] 191 | }, 192 | "model": "resources.vehicle", 193 | "pk": 45 194 | }, 195 | { 196 | "fields": { 197 | "vehicle_class": "airspeeder", 198 | "pilots": [ 199 | 11 200 | ] 201 | }, 202 | "model": "resources.vehicle", 203 | "pk": 46 204 | }, 205 | { 206 | "fields": { 207 | "vehicle_class": "gunship", 208 | "pilots": [] 209 | }, 210 | "model": "resources.vehicle", 211 | "pk": 50 212 | }, 213 | { 214 | "fields": { 215 | "vehicle_class": "gunship", 216 | "pilots": [] 217 | }, 218 | "model": "resources.vehicle", 219 | "pk": 51 220 | }, 221 | { 222 | "fields": { 223 | "vehicle_class": "walker", 224 | "pilots": [] 225 | }, 226 | "model": "resources.vehicle", 227 | "pk": 53 228 | }, 229 | { 230 | "fields": { 231 | "vehicle_class": "walker", 232 | "pilots": [] 233 | }, 234 | "model": "resources.vehicle", 235 | "pk": 54 236 | }, 237 | { 238 | "fields": { 239 | "vehicle_class": "speeder", 240 | "pilots": [ 241 | 67 242 | ] 243 | }, 244 | "model": "resources.vehicle", 245 | "pk": 55 246 | }, 247 | { 248 | "fields": { 249 | "vehicle_class": "transport", 250 | "pilots": [] 251 | }, 252 | "model": "resources.vehicle", 253 | "pk": 56 
254 | }, 255 | { 256 | "fields": { 257 | "vehicle_class": "starfighter", 258 | "pilots": [] 259 | }, 260 | "model": "resources.vehicle", 261 | "pk": 57 262 | }, 263 | { 264 | "fields": { 265 | "vehicle_class": "wheeled walker", 266 | "pilots": [ 267 | 79 268 | ] 269 | }, 270 | "model": "resources.vehicle", 271 | "pk": 60 272 | }, 273 | { 274 | "fields": { 275 | "vehicle_class": "fire suppression ship", 276 | "pilots": [] 277 | }, 278 | "model": "resources.vehicle", 279 | "pk": 62 280 | }, 281 | { 282 | "fields": { 283 | "vehicle_class": "droid starfighter", 284 | "pilots": [] 285 | }, 286 | "model": "resources.vehicle", 287 | "pk": 67 288 | }, 289 | { 290 | "fields": { 291 | "vehicle_class": "airspeeder", 292 | "pilots": [] 293 | }, 294 | "model": "resources.vehicle", 295 | "pk": 69 296 | }, 297 | { 298 | "fields": { 299 | "vehicle_class": "air speeder", 300 | "pilots": [] 301 | }, 302 | "model": "resources.vehicle", 303 | "pk": 70 304 | }, 305 | { 306 | "fields": { 307 | "vehicle_class": "wheeled walker", 308 | "pilots": [] 309 | }, 310 | "model": "resources.vehicle", 311 | "pk": 71 312 | }, 313 | { 314 | "fields": { 315 | "vehicle_class": "droid tank", 316 | "pilots": [] 317 | }, 318 | "model": "resources.vehicle", 319 | "pk": 72 320 | }, 321 | { 322 | "fields": { 323 | "vehicle_class": "airspeeder", 324 | "pilots": [] 325 | }, 326 | "model": "resources.vehicle", 327 | "pk": 73 328 | }, 329 | { 330 | "fields": { 331 | "vehicle_class": "walker", 332 | "pilots": [] 333 | }, 334 | "model": "resources.vehicle", 335 | "pk": 76 336 | } 337 | ] 338 | -------------------------------------------------------------------------------- /images/graphiql.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jlouis/graphql-erlang-tutorial/c2cebedf45e8b2bd3eb798977ebb72e13b9bfd05/images/graphiql.png -------------------------------------------------------------------------------- /rebar.config: 
-------------------------------------------------------------------------------- 1 | {erl_opts, [ 2 | warn_bif_clash, 3 | warn_deprecated_function, 4 | warn_export_all, 5 | warn_export_vars, 6 | warn_exported_vars, 7 | warn_format, 8 | warn_obsolete_guard, 9 | warn_shadow_vars, 10 | %%warn_untyped_record, 11 | warn_unused_function, 12 | warn_unused_record, 13 | warn_unused_vars, 14 | debug_info, 15 | {parse_transform, lager_transform} 16 | ]}. 17 | 18 | {deps, [ 19 | {graphql, 20 | {git, "https://github.com/shopgun/graphql-erlang.git", {branch, "master"}}}, 21 | {cowboy, {git, "https://github.com/ninenines/cowboy.git", {tag, "2.2.0"}}}, 22 | {lager, {git, "https://github.com/erlang-lager/lager.git", {tag, "3.4.2"}}}, 23 | {jsx, {git, "https://github.com/talentdeficit/jsx.git", {branch, "master"}}}, 24 | {recon, {git, "https://github.com/ferd/recon.git", {branch, "master"}}}, 25 | {iso8601, {git, "https://github.com/erlsci/iso8601.git", {branch, "master"}}} 26 | ]}. 27 | 28 | 29 | {relx, [ 30 | {release, {sw, "1.0.0"}, [ 31 | sw_core, 32 | sw_web, 33 | recon 34 | ]}, 35 | 36 | {dev_mode, false}, 37 | {sys_config, "./config/sys.config"}, 38 | {vm_args, "./config/vm.args"}, 39 | 40 | {overlay, [ 41 | {mkdir, "db"}, 42 | {mkdir, "log"}, 43 | {mkdir, "log/sasl"}, 44 | 45 | {copy, "{{root_dir}}/db/FALLBACK.BUP", 46 | "{{output_dir}}/db/FALLBACK.BUP"} 47 | ]}, 48 | {extended_start_script, true}]}. 
49 | 50 | -------------------------------------------------------------------------------- /rebar.lock: -------------------------------------------------------------------------------- 1 | {"1.1.0", 2 | [{<<"cowboy">>, 3 | {git,"https://github.com/ninenines/cowboy.git", 4 | {ref,"572d38082f177a9b975153569b928375a5902b48"}}, 5 | 0}, 6 | {<<"cowlib">>, 7 | {git,"https://github.com/ninenines/cowlib", 8 | {ref,"3ef5b48a028bb66f82b452c98ae515903096641c"}}, 9 | 1}, 10 | {<<"goldrush">>,{pkg,<<"goldrush">>,<<"0.1.9">>},1}, 11 | {<<"graphql">>, 12 | {git,"https://github.com/shopgun/graphql-erlang.git", 13 | {ref,"663329fa69a014fe28f4437f84fb91bc3843d650"}}, 14 | 0}, 15 | {<<"iso8601">>, 16 | {git,"https://github.com/erlsci/iso8601.git", 17 | {ref,"14ec9ecbea7aaef08176a2784c59b51ea04e5b82"}}, 18 | 0}, 19 | {<<"jsx">>, 20 | {git,"https://github.com/talentdeficit/jsx.git", 21 | {ref,"45ffea21a6863c58fb7da1f937e868916ff68b27"}}, 22 | 0}, 23 | {<<"lager">>, 24 | {git,"https://github.com/erlang-lager/lager.git", 25 | {ref,"a10c23c51dea6bbd63e5048c8a5a702169973114"}}, 26 | 0}, 27 | {<<"ranch">>, 28 | {git,"https://github.com/ninenines/ranch", 29 | {ref,"55c2a9d623454f372a15e99721a37093d8773b48"}}, 30 | 1}, 31 | {<<"recon">>, 32 | {git,"https://github.com/ferd/recon.git", 33 | {ref,"08b5e9d71de76115d2ac98b19ee5ff8b356d11a2"}}, 34 | 0}]}. 35 | [ 36 | {pkg_hash,[ 37 | {<<"goldrush">>, <<"F06E5D5F1277DA5C413E84D5A2924174182FB108DABB39D5EC548B27424CD106">>}]} 38 | ]. 
39 | -------------------------------------------------------------------------------- /talks/euc-2017/Makefile: -------------------------------------------------------------------------------- 1 | default: graphql.pdf 2 | 3 | graphql.pdf: graphql.tex compiler.pdf 4 | lualatex graphql.tex 5 | 6 | compiler.pdf: compiler.dot 7 | dot -Tpdf compiler.dot > compiler.pdf 8 | 9 | -------------------------------------------------------------------------------- /talks/euc-2017/compiler.dot: -------------------------------------------------------------------------------- 1 | digraph compiler { 2 | node [fontname="Helvetica",fontsize=12,shape=plaintext,labelfontname=Helvetica]; 3 | rankdir=LR; 4 | labeljust = l; 5 | labelloc = t; 6 | fontsize = 24; 7 | fontname="Helvetica"; 8 | nodesep=0.5; 9 | 10 | query [label=Query] 11 | ast [label=AST] 12 | annot [label="AST2"] 13 | result [label="Result"] 14 | params [label="Parameters"] 15 | elab_params [label="Elab Params"] 16 | 17 | query -> ast [label="Lex/Parse"] 18 | ast -> annot [label="Elaborate"] 19 | annot -> annot [label="Type Check / Validate"] 20 | annot -> result [label="Execute", color="red"] 21 | params -> elab_params [label="Type Check", color="red"] 22 | elab_params -> annot [color="red"] 23 | } 24 | -------------------------------------------------------------------------------- /talks/euc-2017/graphql.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jlouis/graphql-erlang-tutorial/c2cebedf45e8b2bd3eb798977ebb72e13b9bfd05/talks/euc-2017/graphql.pdf -------------------------------------------------------------------------------- /test/Makefile: -------------------------------------------------------------------------------- 1 | test: 2 | $(MAKE) -C .. 
test 3 | -------------------------------------------------------------------------------- /test/sw_SUITE.erl: -------------------------------------------------------------------------------- 1 | -module(sw_SUITE). 2 | -include_lib("common_test/include/ct.hrl"). 3 | 4 | -compile(export_all). 5 | 6 | suite() -> 7 | [{timetrap, {seconds, 30}}]. 8 | 9 | init_per_group(_Group, Config) -> 10 | Config. 11 | 12 | end_per_group(_Group, _Config) -> 13 | ok. 14 | 15 | init_per_suite(Config) -> 16 | {ok, _} = application:ensure_all_started(graphql), 17 | ok = init_mnesia(Config), 18 | {ok, _} = application:ensure_all_started(sw_core), 19 | Config. 20 | 21 | end_per_suite(_Config) -> 22 | ok. 23 | 24 | init_per_testcase(_Case, Config) -> 25 | Config. 26 | 27 | end_per_testcase(_Case, _Config) -> 28 | ok. 29 | 30 | groups() -> 31 | [{setup, [], [live]}, 32 | {queries, [], [system_tour_queries, 33 | enum_queries]}]. 34 | 35 | all() -> [ 36 | {group, setup}, 37 | {group, queries} 38 | ]. 39 | 40 | %% -- Mnesia initialization ---------------------- 41 | init_mnesia(Config) -> 42 | {ok, _} = application:ensure_all_started(mnesia), 43 | FixtureDir = ?config(data_dir, Config), 44 | ok = sw_core_db:create_fixture(ram_copies, FixtureDir), 45 | ok. 46 | 47 | %% -- SETUP -------------------------------------- 48 | live(Config) -> 49 | Running = [element(1, Runners) 50 | || Runners <- proplists:get_value(running, application:info())], 51 | true = lists:member(sw_core, Running), 52 | ok. 53 | 54 | %% -- TOUR --------------------------------------- 55 | system_tour_queries(Config) -> 56 | ok = run_query(Config, "first"), 57 | ok = run_query(Config, "advanced"), 58 | ok = run_query(Config, "mutation", <<"IntroduceFaction">>), 59 | ok = run_query(Config, "faction"), 60 | ok = run_query(Config, "bwing"), 61 | ok. 62 | 63 | enum_queries(Config) -> 64 | ok = run_query(Config, "enum"), 65 | ok. 
66 | 67 | %% -- INTERNALS ---------------------------------- 68 | run_query(Config, Name) -> 69 | run_query(Config, Name, undefined). 70 | 71 | run_query(Config, Name, OpName) -> 72 | DataDir = ?config(data_dir, Config), 73 | Query = filename:join(DataDir, Name ++ ".query"), 74 | Result = filename:join(DataDir, Name ++ ".result"), 75 | Vars = input(Config, Name), 76 | 77 | {ok, QueryDoc} = file:read_file(Query), 78 | {ok, ExpectedJson} = file:read_file(Result), 79 | Expected = canonicalize_json(ExpectedJson), 80 | 81 | {ok, AST} = graphql:parse(QueryDoc), 82 | Elaborated = graphql:elaborate(AST), 83 | {ok, #{ fun_env := FunEnv, ast := AST2}} = 84 | graphql:type_check(Elaborated), 85 | Coerced = graphql:type_check_params(FunEnv, OpName, Vars), 86 | Ctx = #{ 87 | params => Coerced, 88 | operation_name => OpName 89 | }, 90 | Response = graphql:execute(Ctx, AST2), 91 | Expected = jsx:encode(Response), 92 | ok. 93 | 94 | input(Config, Name) -> 95 | DataDir = ?config(data_dir, Config), 96 | case file:read_file( 97 | filename:join(DataDir, Name ++ ".input")) of 98 | {error, enoent} -> 99 | #{}; 100 | {ok, InputData} -> 101 | jsx:decode(InputData, [return_maps]) 102 | end. 103 | 104 | canonicalize_json(Input) -> 105 | jsx:encode(jsx:decode(Input)). 106 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/advanced.query: -------------------------------------------------------------------------------- 1 | query Q { 2 | node(id:"UGxhbmV0OjE=") { 3 | ... 
on Planet { 4 | id 5 | name 6 | climate 7 | filmConnection(first: 2) { 8 | totalCount 9 | pageInfo { 10 | hasNextPage 11 | hasPreviousPage 12 | } 13 | edges { 14 | node { 15 | ...Films 16 | } 17 | cursor 18 | } 19 | } 20 | residentConnection(first: 3) { 21 | totalCount 22 | pageInfo { 23 | hasNextPage 24 | hasPreviousPage 25 | } 26 | edges { 27 | node { 28 | ...Residents 29 | } 30 | cursor 31 | } 32 | } 33 | } 34 | } 35 | } 36 | 37 | fragment Films on Film { 38 | id 39 | title 40 | director 41 | } 42 | 43 | fragment Residents on Person { 44 | id 45 | name 46 | gender 47 | } 48 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/advanced.result: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "node": { 4 | "climate": "arid", 5 | "filmConnection": { 6 | "edges": [ 7 | { 8 | "cursor": "MQ==", 9 | "node": { 10 | "director": "George Lucas", 11 | "id": "RmlsbTox", 12 | "title": "A New Hope" 13 | } 14 | }, 15 | { 16 | "cursor": "Mg==", 17 | "node": { 18 | "director": "Richard Marquand", 19 | "id": "RmlsbToz", 20 | "title": "Return of the Jedi" 21 | } 22 | } 23 | ], 24 | "pageInfo": { 25 | "hasNextPage": true, 26 | "hasPreviousPage": false 27 | }, 28 | "totalCount": 5 29 | }, 30 | "id": "UGxhbmV0OjE=", 31 | "name": "Tatooine", 32 | "residentConnection": { 33 | "edges": [ 34 | { 35 | "cursor": "MQ==", 36 | "node": { 37 | "gender": "n/a", 38 | "id": "UGVyc29uOjg=", 39 | "name": "R5-D4" 40 | } 41 | }, 42 | { 43 | "cursor": "Mg==", 44 | "node": { 45 | "gender": "male", 46 | "id": "UGVyc29uOjEx", 47 | "name": "Anakin Skywalker" 48 | } 49 | }, 50 | { 51 | "cursor": "Mw==", 52 | "node": { 53 | "gender": "male", 54 | "id": "UGVyc29uOjE=", 55 | "name": "Luke Skywalker" 56 | } 57 | } 58 | ], 59 | "pageInfo": { 60 | "hasNextPage": true, 61 | "hasPreviousPage": false 62 | }, 63 | "totalCount": 10 64 | } 65 | } 66 | } 67 | } 68 | 
-------------------------------------------------------------------------------- /test/sw_SUITE_data/bwing.query: -------------------------------------------------------------------------------- 1 | mutation IntroduceBWing { 2 | introduceStarship(input: 3 | { costInCredits: 5.0, # <1> 4 | length: 20.0, 5 | crew: "1", 6 | name: "B-Wing", 7 | faction: "RmFjdGlvbjoxMDAx", # <2> 8 | starshipClass: "fighter"}) { 9 | starship { 10 | id 11 | name 12 | } 13 | faction { 14 | id 15 | name 16 | ships { 17 | totalCount 18 | 19 | edges { 20 | node { 21 | id name 22 | } 23 | } 24 | } 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/bwing.result: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "introduceStarship": { 4 | "faction": { 5 | "id": "RmFjdGlvbjoxMDAx", 6 | "name": "Rebels", 7 | "ships": { 8 | "edges": [ 9 | { 10 | "node": { 11 | "id": "U3RhcnNoaXA6MTAwMQ==", 12 | "name": "B-Wing" 13 | } 14 | } 15 | ], 16 | "totalCount": 1 17 | } 18 | }, 19 | "starship": { 20 | "id": "U3RhcnNoaXA6MTAwMQ==", 21 | "name": "B-Wing" 22 | } 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/enum.query: -------------------------------------------------------------------------------- 1 | query FilmQuery { 2 | filmByEpisode(episode: JEDI) { 3 | id 4 | title 5 | episodeID 6 | episode 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/enum.result: -------------------------------------------------------------------------------- 1 | { "data" : 2 | { "filmByEpisode" : 3 | { "episode" : "JEDI", 4 | "episodeID" : 6, 5 | "id" : "RmlsbToz", 6 | "title" : "Return of the Jedi" 7 | } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/faction.query: 
-------------------------------------------------------------------------------- 1 | query FactionQuery { 2 | node(id: "RmFjdGlvbjoxMDAx") { 3 | ... on Faction { 4 | id 5 | name 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/faction.result: -------------------------------------------------------------------------------- 1 | { 2 | "data" : { 3 | "node" : { 4 | "id" : "RmFjdGlvbjoxMDAx", 5 | "name" :"Rebels" 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/first.query: -------------------------------------------------------------------------------- 1 | query PlanetQuery { 2 | node(id:"UGxhbmV0OjE=") { # <1> 3 | ... on Planet { # <2> 4 | id # <3> 5 | name 6 | climate 7 | } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/first.result: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "node": { 4 | "climate": "arid", 5 | "id": "UGxhbmV0OjE=", 6 | "name": "Tatooine" 7 | } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/mutation.input: -------------------------------------------------------------------------------- 1 | { 2 | "input": { 3 | "clientMutationId": "D9A5939A-DF75-4C78-9B32-04C1C64F9D9C", // <1> 4 | "name": "Rebels" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/mutation.query: -------------------------------------------------------------------------------- 1 | mutation IntroduceFaction($input: IntroduceFactionInput!) 
{ 2 | introduceFaction(input: $input) { 3 | clientMutationId 4 | faction { 5 | id 6 | name 7 | ships { 8 | totalCount 9 | } 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/mutation.result: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "introduceFaction": { 4 | "clientMutationId": "D9A5939A-DF75-4C78-9B32-04C1C64F9D9C", // <1> 5 | "faction": { 6 | "id": "RmFjdGlvbjoxMDAx", // <2> 7 | "name": "Rebels", 8 | "ships": { 9 | "totalCount": 0 // <3> 10 | } 11 | } 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/starships.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "pilots": [], 5 | "MGLT": "60", 6 | "starship_class": "corvette", 7 | "hyperdrive_rating": "2.0" 8 | }, 9 | "model": "resources.starship", 10 | "pk": 2 11 | }, 12 | { 13 | "fields": { 14 | "pilots": [], 15 | "MGLT": "60", 16 | "starship_class": "Star Destroyer", 17 | "hyperdrive_rating": "2.0" 18 | }, 19 | "model": "resources.starship", 20 | "pk": 3 21 | }, 22 | { 23 | "fields": { 24 | "pilots": [], 25 | "MGLT": "70", 26 | "starship_class": "landing craft", 27 | "hyperdrive_rating": "1.0" 28 | }, 29 | "model": "resources.starship", 30 | "pk": 5 31 | }, 32 | { 33 | "fields": { 34 | "pilots": [], 35 | "MGLT": "10", 36 | "starship_class": "Deep Space Mobile Battlestation", 37 | "hyperdrive_rating": "4.0" 38 | }, 39 | "model": "resources.starship", 40 | "pk": 9 41 | }, 42 | { 43 | "fields": { 44 | "pilots": [ 45 | 13, 46 | 14, 47 | 25, 48 | 31 49 | ], 50 | "MGLT": "75", 51 | "starship_class": "Light freighter", 52 | "hyperdrive_rating": "0.5" 53 | }, 54 | "model": "resources.starship", 55 | "pk": 10 56 | }, 57 | { 58 | "fields": { 59 | "pilots": [], 60 | "MGLT": "80", 61 | "starship_class": "assault starfighter", 62 | 
"hyperdrive_rating": "1.0" 63 | }, 64 | "model": "resources.starship", 65 | "pk": 11 66 | }, 67 | { 68 | "fields": { 69 | "pilots": [ 70 | 1, 71 | 9, 72 | 18, 73 | 19 74 | ], 75 | "MGLT": "100", 76 | "starship_class": "Starfighter", 77 | "hyperdrive_rating": "1.0" 78 | }, 79 | "model": "resources.starship", 80 | "pk": 12 81 | }, 82 | { 83 | "fields": { 84 | "pilots": [ 85 | 4 86 | ], 87 | "MGLT": "105", 88 | "starship_class": "Starfighter", 89 | "hyperdrive_rating": "1.0" 90 | }, 91 | "model": "resources.starship", 92 | "pk": 13 93 | }, 94 | { 95 | "fields": { 96 | "pilots": [], 97 | "MGLT": "40", 98 | "starship_class": "Star dreadnought", 99 | "hyperdrive_rating": "2.0" 100 | }, 101 | "model": "resources.starship", 102 | "pk": 15 103 | }, 104 | { 105 | "fields": { 106 | "pilots": [], 107 | "MGLT": "20", 108 | "starship_class": "Medium transport", 109 | "hyperdrive_rating": "4.0" 110 | }, 111 | "model": "resources.starship", 112 | "pk": 17 113 | }, 114 | { 115 | "fields": { 116 | "pilots": [ 117 | 22 118 | ], 119 | "MGLT": "70", 120 | "starship_class": "Patrol craft", 121 | "hyperdrive_rating": "3.0" 122 | }, 123 | "model": "resources.starship", 124 | "pk": 21 125 | }, 126 | { 127 | "fields": { 128 | "pilots": [ 129 | 1, 130 | 13, 131 | 14 132 | ], 133 | "MGLT": "50", 134 | "starship_class": "Armed government transport", 135 | "hyperdrive_rating": "1.0" 136 | }, 137 | "model": "resources.starship", 138 | "pk": 22 139 | }, 140 | { 141 | "fields": { 142 | "pilots": [], 143 | "MGLT": "40", 144 | "starship_class": "Escort ship", 145 | "hyperdrive_rating": "2.0" 146 | }, 147 | "model": "resources.starship", 148 | "pk": 23 149 | }, 150 | { 151 | "fields": { 152 | "pilots": [], 153 | "MGLT": "60", 154 | "starship_class": "Star Cruiser", 155 | "hyperdrive_rating": "1.0" 156 | }, 157 | "model": "resources.starship", 158 | "pk": 27 159 | }, 160 | { 161 | "fields": { 162 | "pilots": [ 163 | 29 164 | ], 165 | "MGLT": "120", 166 | "starship_class": "Starfighter", 167 | 
"hyperdrive_rating": "1.0" 168 | }, 169 | "model": "resources.starship", 170 | "pk": 28 171 | }, 172 | { 173 | "fields": { 174 | "pilots": [], 175 | "MGLT": "91", 176 | "starship_class": "Assault Starfighter", 177 | "hyperdrive_rating": "2.0" 178 | }, 179 | "model": "resources.starship", 180 | "pk": 29 181 | }, 182 | { 183 | "fields": { 184 | "pilots": [], 185 | "MGLT": "unknown", 186 | "starship_class": "Space cruiser", 187 | "hyperdrive_rating": "2.0" 188 | }, 189 | "model": "resources.starship", 190 | "pk": 31 191 | }, 192 | { 193 | "fields": { 194 | "pilots": [], 195 | "MGLT": "unknown", 196 | "starship_class": "Droid control ship", 197 | "hyperdrive_rating": "2.0" 198 | }, 199 | "model": "resources.starship", 200 | "pk": 32 201 | }, 202 | { 203 | "fields": { 204 | "pilots": [ 205 | 11, 206 | 35, 207 | 60 208 | ], 209 | "MGLT": "unknown", 210 | "starship_class": "Starfighter", 211 | "hyperdrive_rating": "1.0" 212 | }, 213 | "model": "resources.starship", 214 | "pk": 39 215 | }, 216 | { 217 | "fields": { 218 | "pilots": [ 219 | 39 220 | ], 221 | "MGLT": "unknown", 222 | "starship_class": "yacht", 223 | "hyperdrive_rating": "1.8" 224 | }, 225 | "model": "resources.starship", 226 | "pk": 40 227 | }, 228 | { 229 | "fields": { 230 | "pilots": [ 231 | 44 232 | ], 233 | "MGLT": "unknown", 234 | "starship_class": "Space Transport", 235 | "hyperdrive_rating": "1.5" 236 | }, 237 | "model": "resources.starship", 238 | "pk": 41 239 | }, 240 | { 241 | "fields": { 242 | "pilots": [], 243 | "MGLT": "unknown", 244 | "starship_class": "Diplomatic barge", 245 | "hyperdrive_rating": "0.7" 246 | }, 247 | "model": "resources.starship", 248 | "pk": 43 249 | }, 250 | { 251 | "fields": { 252 | "pilots": [], 253 | "MGLT": "unknown", 254 | "starship_class": "freighter", 255 | "hyperdrive_rating": "unknown" 256 | }, 257 | "model": "resources.starship", 258 | "pk": 47 259 | }, 260 | { 261 | "fields": { 262 | "pilots": [ 263 | 10, 264 | 58 265 | ], 266 | "MGLT": "unknown", 267 | 
"starship_class": "Starfighter", 268 | "hyperdrive_rating": "1.0" 269 | }, 270 | "model": "resources.starship", 271 | "pk": 48 272 | }, 273 | { 274 | "fields": { 275 | "pilots": [ 276 | 35 277 | ], 278 | "MGLT": "unknown", 279 | "starship_class": "yacht", 280 | "hyperdrive_rating": "0.9" 281 | }, 282 | "model": "resources.starship", 283 | "pk": 49 284 | }, 285 | { 286 | "fields": { 287 | "pilots": [], 288 | "MGLT": "unknown", 289 | "starship_class": "assault ship", 290 | "hyperdrive_rating": "0.6" 291 | }, 292 | "model": "resources.starship", 293 | "pk": 52 294 | }, 295 | { 296 | "fields": { 297 | "pilots": [], 298 | "MGLT": "unknown", 299 | "starship_class": "yacht", 300 | "hyperdrive_rating": "1.5" 301 | }, 302 | "model": "resources.starship", 303 | "pk": 58 304 | }, 305 | { 306 | "fields": { 307 | "pilots": [ 308 | 10, 309 | 11 310 | ], 311 | "MGLT": "unknown", 312 | "starship_class": "capital ship", 313 | "hyperdrive_rating": "1.5" 314 | }, 315 | "model": "resources.starship", 316 | "pk": 59 317 | }, 318 | { 319 | "fields": { 320 | "pilots": [], 321 | "MGLT": "unknown", 322 | "starship_class": "transport", 323 | "hyperdrive_rating": "1.0" 324 | }, 325 | "model": "resources.starship", 326 | "pk": 61 327 | }, 328 | { 329 | "fields": { 330 | "pilots": [], 331 | "MGLT": "unknown", 332 | "starship_class": "star destroyer", 333 | "hyperdrive_rating": "1.0" 334 | }, 335 | "model": "resources.starship", 336 | "pk": 63 337 | }, 338 | { 339 | "fields": { 340 | "pilots": [ 341 | 10, 342 | 35 343 | ], 344 | "MGLT": "unknown", 345 | "starship_class": "yacht", 346 | "hyperdrive_rating": "0.5" 347 | }, 348 | "model": "resources.starship", 349 | "pk": 64 350 | }, 351 | { 352 | "fields": { 353 | "pilots": [ 354 | 10, 355 | 11 356 | ], 357 | "MGLT": "unknown", 358 | "starship_class": "starfighter", 359 | "hyperdrive_rating": "1.0" 360 | }, 361 | "model": "resources.starship", 362 | "pk": 65 363 | }, 364 | { 365 | "fields": { 366 | "pilots": [], 367 | "MGLT": "100", 368 | 
"starship_class": "starfighter", 369 | "hyperdrive_rating": "1.0" 370 | }, 371 | "model": "resources.starship", 372 | "pk": 66 373 | }, 374 | { 375 | "fields": { 376 | "pilots": [], 377 | "MGLT": "unknown", 378 | "starship_class": "cruiser", 379 | "hyperdrive_rating": "1.0" 380 | }, 381 | "model": "resources.starship", 382 | "pk": 68 383 | }, 384 | { 385 | "fields": { 386 | "pilots": [ 387 | 10, 388 | 79 389 | ], 390 | "MGLT": "unknown", 391 | "starship_class": "starfighter", 392 | "hyperdrive_rating": "6" 393 | }, 394 | "model": "resources.starship", 395 | "pk": 74 396 | }, 397 | { 398 | "fields": { 399 | "pilots": [], 400 | "MGLT": "unknown", 401 | "starship_class": "starfighter", 402 | "hyperdrive_rating": "1.0" 403 | }, 404 | "model": "resources.starship", 405 | "pk": 75 406 | } 407 | ] 408 | -------------------------------------------------------------------------------- /test/sw_SUITE_data/vehicles.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "vehicle_class": "wheeled", 5 | "pilots": [] 6 | }, 7 | "model": "resources.vehicle", 8 | "pk": 4 9 | }, 10 | { 11 | "fields": { 12 | "vehicle_class": "repulsorcraft", 13 | "pilots": [] 14 | }, 15 | "model": "resources.vehicle", 16 | "pk": 6 17 | }, 18 | { 19 | "fields": { 20 | "vehicle_class": "repulsorcraft", 21 | "pilots": [] 22 | }, 23 | "model": "resources.vehicle", 24 | "pk": 7 25 | }, 26 | { 27 | "fields": { 28 | "vehicle_class": "starfighter", 29 | "pilots": [] 30 | }, 31 | "model": "resources.vehicle", 32 | "pk": 8 33 | }, 34 | { 35 | "fields": { 36 | "vehicle_class": "airspeeder", 37 | "pilots": [ 38 | 1, 39 | 18 40 | ] 41 | }, 42 | "model": "resources.vehicle", 43 | "pk": 14 44 | }, 45 | { 46 | "fields": { 47 | "vehicle_class": "space/planetary bomber", 48 | "pilots": [] 49 | }, 50 | "model": "resources.vehicle", 51 | "pk": 16 52 | }, 53 | { 54 | "fields": { 55 | "vehicle_class": "assault walker", 56 | "pilots": [] 57 | }, 58 | "model": 
"resources.vehicle", 59 | "pk": 18 60 | }, 61 | { 62 | "fields": { 63 | "vehicle_class": "walker", 64 | "pilots": [ 65 | 13 66 | ] 67 | }, 68 | "model": "resources.vehicle", 69 | "pk": 19 70 | }, 71 | { 72 | "fields": { 73 | "vehicle_class": "repulsorcraft", 74 | "pilots": [] 75 | }, 76 | "model": "resources.vehicle", 77 | "pk": 20 78 | }, 79 | { 80 | "fields": { 81 | "vehicle_class": "sail barge", 82 | "pilots": [] 83 | }, 84 | "model": "resources.vehicle", 85 | "pk": 24 86 | }, 87 | { 88 | "fields": { 89 | "vehicle_class": "repulsorcraft cargo skiff", 90 | "pilots": [] 91 | }, 92 | "model": "resources.vehicle", 93 | "pk": 25 94 | }, 95 | { 96 | "fields": { 97 | "vehicle_class": "starfighter", 98 | "pilots": [] 99 | }, 100 | "model": "resources.vehicle", 101 | "pk": 26 102 | }, 103 | { 104 | "fields": { 105 | "vehicle_class": "speeder", 106 | "pilots": [ 107 | 1, 108 | 5 109 | ] 110 | }, 111 | "model": "resources.vehicle", 112 | "pk": 30 113 | }, 114 | { 115 | "fields": { 116 | "vehicle_class": "starfighter", 117 | "pilots": [] 118 | }, 119 | "model": "resources.vehicle", 120 | "pk": 33 121 | }, 122 | { 123 | "fields": { 124 | "vehicle_class": "repulsorcraft", 125 | "pilots": [] 126 | }, 127 | "model": "resources.vehicle", 128 | "pk": 34 129 | }, 130 | { 131 | "fields": { 132 | "vehicle_class": "repulsorcraft", 133 | "pilots": [] 134 | }, 135 | "model": "resources.vehicle", 136 | "pk": 35 137 | }, 138 | { 139 | "fields": { 140 | "vehicle_class": "repulsorcraft", 141 | "pilots": [] 142 | }, 143 | "model": "resources.vehicle", 144 | "pk": 36 145 | }, 146 | { 147 | "fields": { 148 | "vehicle_class": "landing craft", 149 | "pilots": [] 150 | }, 151 | "model": "resources.vehicle", 152 | "pk": 37 153 | }, 154 | { 155 | "fields": { 156 | "vehicle_class": "submarine", 157 | "pilots": [ 158 | 10, 159 | 32 160 | ] 161 | }, 162 | "model": "resources.vehicle", 163 | "pk": 38 164 | }, 165 | { 166 | "fields": { 167 | "vehicle_class": "speeder", 168 | "pilots": [ 169 | 44 170 | 
] 171 | }, 172 | "model": "resources.vehicle", 173 | "pk": 42 174 | }, 175 | { 176 | "fields": { 177 | "vehicle_class": "repulsorcraft", 178 | "pilots": [ 179 | 11 180 | ] 181 | }, 182 | "model": "resources.vehicle", 183 | "pk": 44 184 | }, 185 | { 186 | "fields": { 187 | "vehicle_class": "airspeeder", 188 | "pilots": [ 189 | 70 190 | ] 191 | }, 192 | "model": "resources.vehicle", 193 | "pk": 45 194 | }, 195 | { 196 | "fields": { 197 | "vehicle_class": "airspeeder", 198 | "pilots": [ 199 | 11 200 | ] 201 | }, 202 | "model": "resources.vehicle", 203 | "pk": 46 204 | }, 205 | { 206 | "fields": { 207 | "vehicle_class": "gunship", 208 | "pilots": [] 209 | }, 210 | "model": "resources.vehicle", 211 | "pk": 50 212 | }, 213 | { 214 | "fields": { 215 | "vehicle_class": "gunship", 216 | "pilots": [] 217 | }, 218 | "model": "resources.vehicle", 219 | "pk": 51 220 | }, 221 | { 222 | "fields": { 223 | "vehicle_class": "walker", 224 | "pilots": [] 225 | }, 226 | "model": "resources.vehicle", 227 | "pk": 53 228 | }, 229 | { 230 | "fields": { 231 | "vehicle_class": "walker", 232 | "pilots": [] 233 | }, 234 | "model": "resources.vehicle", 235 | "pk": 54 236 | }, 237 | { 238 | "fields": { 239 | "vehicle_class": "speeder", 240 | "pilots": [ 241 | 67 242 | ] 243 | }, 244 | "model": "resources.vehicle", 245 | "pk": 55 246 | }, 247 | { 248 | "fields": { 249 | "vehicle_class": "transport", 250 | "pilots": [] 251 | }, 252 | "model": "resources.vehicle", 253 | "pk": 56 254 | }, 255 | { 256 | "fields": { 257 | "vehicle_class": "starfighter", 258 | "pilots": [] 259 | }, 260 | "model": "resources.vehicle", 261 | "pk": 57 262 | }, 263 | { 264 | "fields": { 265 | "vehicle_class": "wheeled walker", 266 | "pilots": [ 267 | 79 268 | ] 269 | }, 270 | "model": "resources.vehicle", 271 | "pk": 60 272 | }, 273 | { 274 | "fields": { 275 | "vehicle_class": "fire suppression ship", 276 | "pilots": [] 277 | }, 278 | "model": "resources.vehicle", 279 | "pk": 62 280 | }, 281 | { 282 | "fields": { 283 | 
"vehicle_class": "droid starfighter", 284 | "pilots": [] 285 | }, 286 | "model": "resources.vehicle", 287 | "pk": 67 288 | }, 289 | { 290 | "fields": { 291 | "vehicle_class": "airspeeder", 292 | "pilots": [] 293 | }, 294 | "model": "resources.vehicle", 295 | "pk": 69 296 | }, 297 | { 298 | "fields": { 299 | "vehicle_class": "air speeder", 300 | "pilots": [] 301 | }, 302 | "model": "resources.vehicle", 303 | "pk": 70 304 | }, 305 | { 306 | "fields": { 307 | "vehicle_class": "wheeled walker", 308 | "pilots": [] 309 | }, 310 | "model": "resources.vehicle", 311 | "pk": 71 312 | }, 313 | { 314 | "fields": { 315 | "vehicle_class": "droid tank", 316 | "pilots": [] 317 | }, 318 | "model": "resources.vehicle", 319 | "pk": 72 320 | }, 321 | { 322 | "fields": { 323 | "vehicle_class": "airspeeder", 324 | "pilots": [] 325 | }, 326 | "model": "resources.vehicle", 327 | "pk": 73 328 | }, 329 | { 330 | "fields": { 331 | "vehicle_class": "walker", 332 | "pilots": [] 333 | }, 334 | "model": "resources.vehicle", 335 | "pk": 76 336 | } 337 | ] 338 | --------------------------------------------------------------------------------