20 |
21 | ## Intro
22 |
23 | Brainyduck helps you transition your backend to a top notch serverless environment while keeping the developer experience neat! 🌈🍦🐥
24 |
25 | Worry not about new and complex setup and deployment requisites: the GraphQL schemas you already have are all you need to build a world-class & reliable endpoint.
26 |
27 | Just run `npx brainyduck` on your schemas, and the days when you had to manually set up your backend will forever be gone! Never find yourself redefining types in multiple files, ever again. 🥹
28 |
29 | 
30 |
31 | ## Documentation
32 |
33 | Please refer to the [documentation](https://duck.brainy.sh/#/?id=why) in order to [get started](https://duck.brainy.sh/#/?id=getting-started) 🐣.
34 |
35 | 
36 |
37 | ## Sponsors
38 |
39 |
50 |
--------------------------------------------------------------------------------
/cli.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import fs from 'fs'
4 | import resolve from 'resolve-cwd'
5 | import { program } from 'commander'
6 | import { constantCase } from 'constant-case'
7 | import { fileURLToPath } from 'node:url'
8 | import { patterns } from './utils.js'
9 |
// Detect whether brainyduck is installed in the current project
// (resolvable from the working directory). Commands prefer the local
// installation when present; see findCommandFile below.
let locallyInstalled = false
try {
  locallyInstalled = Boolean(resolve('brainyduck/utils'))
} catch (e) {} // resolution failure simply means "not installed locally" — intentional swallow
14 |
/**
 * Resolve the executable file for a subcommand.
 * Prefers the locally installed `brainyduck` package; otherwise falls
 * back to the bundled `./commands/<name>.js` next to this script.
 */
const findCommandFile = (commandName) => {
  if (locallyInstalled) {
    return resolve(`brainyduck/${commandName}`)
  }

  return fileURLToPath(new URL(`./commands/${commandName}.js`, import.meta.url))
}
19 |
// Environment-variable prefixes used when mirroring CLI options into
// the environment (see optionParser).
const prefix = {
  FAUNA: 'FAUNA',
  BRAINYDUCK: 'BRAINYDUCK',
}

// Load this package's manifest to expose its version to commander.
// Fix: fs.readFileSync is synchronous — the previous `await` on it was a
// no-op. Also pass an explicit encoding so JSON.parse receives a string.
const pkg = JSON.parse(
  fs.readFileSync(fileURLToPath(new URL('./package.json', import.meta.url)), 'utf8')
)
28 |
/**
 * Build a commander option handler that mirrors the parsed option value
 * into an environment variable, so spawned subcommand processes inherit
 * it. E.g. key 'graphqlPort' with prefix 'FAUNA' becomes
 * FAUNA_GRAPHQL_PORT.
 */
const optionParser =
  (key, _prefix = prefix.BRAINYDUCK) =>
  () => {
    const base = constantCase(key)
    const name = _prefix ? `${_prefix}_${base}` : base

    // Side effect: export the value for child processes, and return it.
    return (process.env[name] = program.opts()[key])
  }
39 |
40 | program
41 | .version(pkg.version)
42 |
43 | .hook('preSubcommand', (thisCommand, subcommand) => {
44 | if (['build', 'pack', 'export', 'dev'].includes(subcommand._name) && !locallyInstalled) {
45 | console.error(
46 | `Looks like brainyduck is not installed locally and the command '${subcommand._name}' requires a local installation.\nHave you installed brainyduck globally instead?`
47 | )
48 | throw new Error('You must install brainyduck locally.')
49 | }
50 | })
51 |
52 | .option(
53 | '-s, --secret ',
54 | `set Fauna's secret key, used to deploy data to your database (defaults to <${prefix.FAUNA}_SECRET>).`
55 | )
56 | .on('option:secret', optionParser('secret', prefix.FAUNA))
57 |
58 | .option(
59 | '--domain ',
60 | `FaunaDB server domain (defaults to <${prefix.FAUNA}_DOMAIN or 'db.fauna.com'>).`
61 | )
62 | .on('option:domain', optionParser('domain', prefix.FAUNA))
63 |
64 | .option('--port ', `Connection port (defaults to <${prefix.FAUNA}_PORT>).`)
65 | .on('option:port', optionParser('port', prefix.FAUNA))
66 |
67 | .option(
68 | '--graphql-domain ',
69 | `Graphql server domain (defaults to <${prefix.FAUNA}_GRAPHQL_DOMAIN or 'graphql.fauna.com'>).`
70 | )
71 | .on('option:graphql-domain', optionParser('graphqlDomain', prefix.FAUNA))
72 |
73 | .option(
74 | '--graphql-port ',
75 | `Graphql connection port (defaults to <${prefix.FAUNA}_GRAPHQL_PORT>).`
76 | )
77 | .on('option:graphql-port', optionParser('graphqlPort', prefix.FAUNA))
78 |
79 | .option(
80 | '--scheme ',
81 | `Connection scheme (defaults to <${prefix.FAUNA}_SCHEME or 'https'>).`
82 | )
83 | .on('option:scheme', optionParser('scheme', prefix.FAUNA))
84 |
85 | .option('--overwrite', `wipe out data related to the command before its execution`)
86 | .on('option:overwrite', optionParser('overwrite'))
87 |
88 | .option('--no-operations-generation', `disable the auto-generated operations documents.`)
89 | .on('option:no-operations-generation', function () {
90 | process.env.BRAINYDUCK_NO_OPERATIONS_GENERATION = !this.operationsGeneration
91 | })
92 |
93 | .option(
94 | '-f, --force ',
95 | `skip prompt confirmations (defaults to ',
101 | `set glob patterns to exclude matches (defaults to ).`
102 | )
103 | .on('option:ignore', optionParser('ignore'))
104 |
105 | .option('--no-watch', `disable the files watcher (only used in the dev command).`)
106 | .on('option:no-watch', function () {
107 | process.env.BRAINYDUCK_NO_WATCH = !this.watch
108 | })
109 |
110 | .option(
111 | '--only-changes',
112 | `ignore initial files and watch changes ONLY (only used in the dev command).`
113 | )
114 | .on('option:only-changes', optionParser('onlyChanges'))
115 |
116 | .option(
117 | '--callback ',
118 | `run external command after every execution completion (only used in the dev command).`
119 | )
120 | .on('option:callback', optionParser('callback'))
121 |
122 | .option('--tsconfig', `use a custom tsconfig file for the sdk transpilation.`)
123 | .on('option:tsconfig', function () {
124 | process.env.BRAINYDUCK_TSCONFIG = this.tsconfig
125 | })
126 |
127 | .option('--verbose', `run the command with verbose logging.`)
128 | .on('option:verbose', function () {
129 | process.env.DEBUG = 'brainyduck:*'
130 | })
131 |
132 | .option('--debug [port]', `run the command with debugging listening on [port].`)
133 | .on('option:debug', function () {
134 | process.env.NODE_OPTIONS = `--inspect=${this.debug || 9229}`
135 | })
136 |
137 | .option('--debug-brk [port]', `run the command with debugging(-brk) listening on [port].`)
138 | .on('option:debug-brk', function () {
139 | process.env.NODE_OPTIONS = `--inspect-brk=${this['debug-brk'] || 9229}`
140 | })
141 |
142 | .command(
143 | 'build [schemas-pattern] [documents-pattern] [output]',
144 | 'code generator that creates an easily accessible API. Defaults: [schemas-pattern: **/[A-Z]*.(graphql|gql), documents-pattern: **/[a-z]*.(graphql|gql) output: ]',
145 | {
146 | executableFile: findCommandFile(`build`),
147 | }
148 | )
149 |
150 | .command('export [destination]', 'export the built module as an independent node package', {
151 | executableFile: findCommandFile(`export`),
152 | })
153 |
154 | .command('pack', 'create a tarball from the built module', {
155 | executableFile: findCommandFile(`pack`),
156 | })
157 |
158 | .command('dev [directory]', 'build, deploy and watch for changes. Defaults: [directory: ]', {
159 | executableFile: findCommandFile(`dev`),
160 | isDefault: true,
161 | })
162 |
163 | .command(
164 | 'deploy [types]',
165 | 'deploy the local folder to your database. Defaults: [types: schemas,functions,indexes,roles]',
166 | {
167 | executableFile: findCommandFile(`deploy`),
168 | }
169 | )
170 |
171 | .command(
172 | 'deploy-schemas [pattern]',
173 | 'push your schema to faunadb. Defaults: [pattern: **/*.(graphql|gql)]',
174 | {
175 | executableFile: findCommandFile(`deploy-schemas`),
176 | }
177 | )
178 |
179 | .command(
180 | 'deploy-functions [pattern]',
181 | `upload your User-Defined Functions (UDF) to faunadb. Defaults: [pattern: ${patterns.UDF}]`,
182 | {
183 | executableFile: findCommandFile(`deploy-functions`),
184 | }
185 | )
186 |
187 | .command(
188 | 'deploy-indexes [pattern]',
189 | `upload your User-Defined Indexes to faunadb. Defaults: [pattern: ${patterns.INDEX}]`,
190 | {
191 | executableFile: findCommandFile(`deploy-indexes`),
192 | }
193 | )
194 |
195 | .command(
196 | 'deploy-roles [pattern]',
197 | `upload your User-Defined Roles (UDR) to faunadb. Defaults: [pattern: ${patterns.UDR}]`,
198 | {
199 | executableFile: findCommandFile(`deploy-roles`),
200 | }
201 | )
202 |
203 | .command(
204 | 'pull-schema [output]',
205 | 'load the schema hosted in faunadb. Defaults: [output: ]',
206 | {
207 | executableFile: findCommandFile(`pull-schema`),
208 | }
209 | )
210 |
211 | .command(
212 | 'reset [types]',
213 | 'wipe out all data in the database {BE CAREFUL!}. Defaults: [types: functions,indexes,roles,documents,collections,databases,schemas]',
214 | {
215 | executableFile: findCommandFile(`reset`),
216 | }
217 | )
218 |
219 | program.parse(process.argv)
220 |
--------------------------------------------------------------------------------
/commands/build.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import fs from 'fs'
4 | import path from 'path'
5 | import _debug from 'debug'
6 | import { parse } from 'graphql'
7 | import { codegen } from '@graphql-codegen/core'
8 | import { execaSync } from 'execa'
9 | import { fileURLToPath } from 'url'
10 | import * as typescriptPlugin from '@graphql-codegen/typescript'
11 | import * as typescriptOperations from '@graphql-codegen/typescript-operations'
12 | import * as typescriptGraphqlRequest from '@graphql-codegen/typescript-graphql-request'
13 | import { temporaryFile, temporaryDirectory } from 'tempy'
14 | import { findBin, pipeData, patternMatch, locateCache } from '../utils.js'
15 | import push from './deploy-schemas.js'
16 |
17 | const __filename = fileURLToPath(import.meta.url)
18 | const __dirname = path.dirname(__filename)
19 |
20 | const debug = _debug('brainyduck:build')
21 |
// graphql-codegen configuration: generates the base TypeScript types,
// the per-operation types, and a graphql-request SDK into a single
// `output.ts` artifact.
const config = {
  filename: 'output.ts',
  plugins: [
    // Each plugin should be an object
    {
      typescript: {}, // Here you can pass configuration to the plugin
    },
    { ['typescript-operations']: {} },
    {
      ['typescript-graphql-request']: {},
    },
  ],
  // Maps plugin names above to their implementations.
  pluginMap: {
    typescript: typescriptPlugin,
    ['typescript-operations']: typescriptOperations,
    ['typescript-graphql-request']: typescriptGraphqlRequest,
  },
}
40 |
/**
 * Auto-generate operations documents from a schema by running `gqlg`
 * against the schema dumped to a temporary file.
 * Returns the directory containing the generated documents.
 * Throws when gqlg exits with a non-zero code.
 */
const generateOperations = async (schema) => {
  debug(`generating operations documents`)
  const schemaFile = temporaryFile()
  let operationsDir = temporaryDirectory()

  fs.writeFileSync(schemaFile, schema)

  const result = execaSync(
    findBin(`gqlg`),
    [`--schemaFilePath`, schemaFile, `--destDirPath`, `./output`],
    { cwd: operationsDir }
  )
  // gqlg writes into ./output relative to its cwd.
  operationsDir = path.join(operationsDir, `./output`)

  if (result.exitCode) {
    console.error(result.stderr)
    throw new Error(`Brainyduck could not generate operations automatically`)
  }

  if (result.stderr) console.warn(result.stderr)
  if (result.stdout) debug(result.stdout)

  debug(`The operations documents have been auto generated at ${operationsDir}`)
  return operationsDir
}
64 |
// Produce the TypeScript SDK source for a schema: combines the
// auto-generated operations (unless BRAINYDUCK_NO_OPERATIONS_GENERATION
// is set) with user documents matching `documentsPattern`, then runs
// graphql-codegen over all of them.
const generateSdk = async (schema, documentsPattern) => {
  debug(`Looking for documents matching '${documentsPattern}'`)
  const autoGeneratedDocuments = process.env.BRAINYDUCK_NO_OPERATIONS_GENERATION
    ? []
    : await patternMatch(`**/*.gql`, await generateOperations(schema))
  debug(`${autoGeneratedDocuments.length} operations documents have been auto generated`)

  // Auto-generated documents first, then user-provided ones; the
  // pattern may arrive as an array or as a comma-separated string.
  const documents = [
    ...autoGeneratedDocuments,
    ...(await patternMatch(
      Array.isArray(documentsPattern) ? documentsPattern : documentsPattern.split(',')
    )),
  ].map((x) => ({
    location: x,
    document: parse(fs.readFileSync(path.resolve(x), 'utf8')),
  }))

  return await codegen({
    ...config,
    documents,
    schema: parse(schema),
  })
}
88 |
/**
 * Build the brainyduck SDK.
 *
 * @param schemaPattern - glob(s) (or awaited piped content) locating the schemas.
 * @param documentsPattern - glob(s) locating the operations documents.
 * @param options.cache - when true (default), write to the shared cache and transpile.
 * @param options.output - explicit output path; mutually exclusive with `cache`.
 * @returns the sdk source when no output is set, otherwise the output path.
 */
export default async function main(
  schemaPattern,
  documentsPattern = '**/[a-z]*.(graphql|gql)',
  { cache, output } = { cache: true }
) {
  debug(`called with:`, { schemaPattern, documentsPattern, cache, output })

  if (cache) {
    if (output) throw new Error(`Options 'cache' and 'output' are mutually exclusive`)

    // Cached builds always land at a fixed location; wipe any stale cache first.
    output = locateCache('sdk.ts')
    fs.rmSync(locateCache(), { force: true, recursive: true })
  }

  // Deploy the schema and get back the merged schema text (puke: true).
  const schema = await push(await schemaPattern, { puke: true })

  debug(`Generating TypeScript SDK`)
  // NOTE: everything inside this template literal is emitted verbatim
  // into the generated sdk.ts — do not edit casually.
  const sdk = `// Temporary workaround for issue microsoft/TypeScript#47663
// Solution found at https://github.com/microsoft/TypeScript/issues/47663#issuecomment-1270716220
import type {} from 'graphql';

${await generateSdk(schema, await documentsPattern)}

/**
 *
 * 💸 This schema was generated in the cloud at the expense of the Brainyduck maintainers 📉
 *
 * 😇 Please kindly consider giving back to the Brainyduck community 😇
 *
 * 🐥🙏 The DUCK needs your help to spread his word to the world! 🙏🐥
 *
 * https://duck.brainy.sh
 * https://github.com/sponsors/zvictor
 *
 * 🌟💎🎆 [THIS SPACE IS AVAILABLE FOR ADVERTISING AND SPONSORSHIP] 🎆💎🌟
 *
 **/
export default function brainyduck({
  secret = process?.env.FAUNA_SECRET,
  endpoint = process?.env.FAUNA_ENDPOINT,
} = {}) {
  if (!secret) {
    throw new Error('SDK requires a secret to be defined.')
  }

  return getSdk(
    new GraphQLClient(endpoint || 'https://graphql.fauna.com/graphql', {
      headers: {
        authorization: secret && \`Bearer \${secret}\`,
      },
    })
  )
}

export { brainyduck }`

  if (!output) {
    return sdk
  }

  const outputDir = path.dirname(output)

  if (!fs.existsSync(outputDir)) {
    fs.mkdirSync(outputDir, { recursive: true })
  }

  fs.writeFileSync(output, sdk)
  debug(`The sdk has been stored at ${output}`)

  // Non-cached builds stop here; cached builds are also transpiled below.
  if (!cache) {
    return output
  }

  const tsconfigFile =
    process.env.BRAINYDUCK_TSCONFIG || path.join(__dirname, '..', 'tsconfig.json')
  const tmpTsconfigFile = locateCache('tsconfig.json')

  debug(`Transpiling sdk with tsconfig at ${tsconfigFile}`)
  debug(`Caching files at ${locateCache()}`)

  if (!fs.existsSync(tsconfigFile)) {
    throw new Error(`The tsconfig file you specified does not exist.`)
  }

  if (!fs.existsSync(locateCache())) {
    fs.mkdirSync(locateCache(), { recursive: true })
  }

  // Write a throwaway tsconfig that extends the user's (or bundled) one
  // but pins include/outDir to the cache.
  fs.writeFileSync(
    tmpTsconfigFile,
    `{
  "extends": "${tsconfigFile}", "include": ["${output}"], "compilerOptions": {
    "outDir": "${locateCache()}",
    ${
      /*
      Fix for the error TS2742: `The inferred type of "X" cannot be named without a reference to "Y". This is likely not portable. A type annotation is necessary.`
      https://github.com/microsoft/TypeScript/issues/42873#issuecomment-1131425209
      */ ''
    }
    "baseUrl": "${path.join(__dirname, '..')}",
    "paths": { "*": ["node_modules/*/"]}
  }
}`
  )

  // Bundle the sdk with tsup into the cache directory.
  const { stdout } = execaSync(
    findBin(`tsup`),
    [
      output,
      '--config',
      path.join(__dirname, '..', 'tsup.config.ts'),
      '--out-dir',
      locateCache(),
      '--tsconfig',
      tmpTsconfigFile,
    ],
    {
      stdio: ['ignore', 'pipe', process.stderr],
      cwd: path.join(__dirname, '..'),
    }
  )
  debug(stdout)

  debug(`The sdk has been transpiled and cached`)
  return output
}
215 |
// CLI entry point: only runs when this file is executed directly
// (not when imported by dev.js). A '-' argument means "read from stdin".
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  const [schemaPattern, documentsPattern, output] = process.argv.slice(2)

  ;(async () => {
    const location = await main(
      schemaPattern === '-' ? pipeData() : schemaPattern,
      documentsPattern === '-' ? pipeData() : documentsPattern,
      output && { output }
    )

    console.log(`The sdk has been saved at ${location}`)
    process.exit(0)
  })()
}
230 |
--------------------------------------------------------------------------------
/commands/deploy-functions.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import fs from 'fs'
4 | import path from 'path'
5 | import _debug from 'debug'
6 | import faunadb from 'faunadb'
7 | import figures from 'figures'
8 | import logSymbols from 'log-symbols'
9 | import { fileURLToPath } from 'url'
10 | import { patterns, faunaClient, patternMatch, runFQL } from '../utils.js'
11 |
12 | const { query: q } = faunadb
13 | const debug = _debug('brainyduck:deploy-functions')
14 |
15 | export default async function main(pattern = patterns.UDF) {
16 | debug(`Looking for files matching '${pattern}'`)
17 | const files = await patternMatch(pattern)
18 |
19 | if (!files.length) {
20 | throw new Error(`No matching file could be found`)
21 | }
22 |
23 | return await Promise.all(
24 | files.map(async (file) => {
25 | debug(`\t${figures.pointer} found ${file}`)
26 | const name = path.basename(file, path.extname(file))
27 | const content = fs.readFileSync(file).toString('utf8')
28 | const replacing = await faunaClient().query(q.IsFunction(q.Function(name)))
29 |
30 | debug(`${replacing ? 'Replacing' : 'Creating'} function '${name}' from file ${file}:`)
31 |
32 | // Remove comments.
33 | // Regex based on: https://stackoverflow.com/a/17791790/599991
34 | // Playground: https://regex101.com/r/IlsODE/3
35 | let query = content.replace(
36 | /(("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*'))|#[^\n]*/gm,
37 | (match, p1, p2, p3, offset, string) => p1 || ''
38 | )
39 |
40 | // converts simplified definitions into extended definitions
41 | if (!query.match(/^[\s]*\{/)) {
42 | query = `{ name: "${name}", body:\n${query}\n}`
43 | }
44 |
45 | // infer function name only if it has not been declared
46 | // Playground: https://regex101.com/r/iRjGBj/1
47 | if (!query.match(/(? {
69 | if (process.env.BRAINYDUCK_OVERWRITE) {
70 | const { default: reset } = await import('./reset.js')
71 | await reset({ functions: true })
72 | }
73 |
74 | const refs = await main(pattern)
75 |
76 | console.log(
77 | `User-defined function(s) created or updated:`,
78 | refs.map((x) => x.name)
79 | )
80 |
81 | process.exit(0)
82 | })()
83 | }
84 |
--------------------------------------------------------------------------------
/commands/deploy-indexes.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import fs from 'fs'
4 | import path from 'path'
5 | import _debug from 'debug'
6 | import figures from 'figures'
7 | import faunadb from 'faunadb'
8 | import logSymbols from 'log-symbols'
9 | import { fileURLToPath } from 'url'
10 | import { faunaClient, patternMatch, patterns, runFQL } from '../utils.js'
11 |
12 | const { query: q } = faunadb
13 | const debug = _debug('brainyduck:deploy-indexes')
14 |
/**
 * Deploy every User-Defined Index matching `pattern` to FaunaDB.
 * Each file name (without extension) must equal the declared index name.
 *
 * @param pattern - glob pattern for index definition files.
 * @returns the created/updated index data, one entry per file.
 * @throws when no file matches, the definition syntax is invalid, or
 *         the file name disagrees with the declared index name.
 */
export default async function main(pattern = patterns.INDEX) {
  debug(`Looking for files matching '${pattern}'`)
  const files = await patternMatch(pattern)

  if (!files.length) {
    throw new Error(`No matching file could be found`)
  }

  return await Promise.all(
    files.map(async (file) => {
      debug(`\t${figures.pointer} found ${file}`)
      const name = path.basename(file, path.extname(file))
      const content = fs.readFileSync(file).toString('utf8')
      const replacing = await faunaClient().query(q.IsIndex(q.Index(name)))

      debug(`${replacing ? 'Replacing' : 'Creating'} index '${name}' from file ${file}:`)

      // remove comments
      let query = content.replace(/#[^!].*$([\s]*)?/gm, '')

      // forbid simplified definitions (only available for UDFs)
      if (!query.match(/^[\s]*\{/)) {
        throw new Error(`Incorrect syntax used in index definition`)
      }

      // infer index name only if it has not been declared
      if (!query.includes('name:')) {
        query = query.replace('{', `{ name: "${name}", `)
      }

      // Fix: String.prototype.match returns null when the declared name is
      // in a form the regex does not recognize; guard it so we raise the
      // intended mismatch error instead of an opaque TypeError.
      const declared = query.match(/name:[\s]*(['"])(.*?)\1/)
      if (!declared || name !== declared[2]) {
        throw new Error(`File name does not match index name: ${name}`)
      }

      query = replacing ? `Update(Index('${name}'), ${query})` : `CreateIndex(${query})`

      const data = runFQL(query)
      debug(`${logSymbols.success} index has been created/updated: ${data.name}`)

      return data
    })
  )
}
58 |
// CLI entry point: only runs when executed directly (not when imported
// by deploy.js / dev.js).
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  const [pattern] = process.argv.slice(2)

  ;(async () => {
    // --overwrite: wipe existing indexes before deploying.
    if (process.env.BRAINYDUCK_OVERWRITE) {
      const { default: reset } = await import('./reset.js')
      await reset({ indexes: true })
    }

    const refs = await main(pattern)

    console.log(
      `User-defined index(es) created or updated:`,
      refs.map((x) => x.name)
    )

    process.exit(0)
  })()
}
78 |
--------------------------------------------------------------------------------
/commands/deploy-roles.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import fs from 'fs'
4 | import path from 'path'
5 | import _debug from 'debug'
6 | import figures from 'figures'
7 | import faunadb from 'faunadb'
8 | import logSymbols from 'log-symbols'
9 | import { fileURLToPath } from 'url'
10 | import { faunaClient, patternMatch, patterns, runFQL } from '../utils.js'
11 |
12 | const { query: q } = faunadb
13 | const debug = _debug('brainyduck:deploy-roles')
14 |
/**
 * Deploy every User-Defined Role matching `pattern` to FaunaDB.
 * Each file name (without extension) must equal the declared role name.
 *
 * @param pattern - glob pattern for role definition files.
 * @returns the created/updated role data, one entry per file.
 * @throws when no file matches, the definition syntax is invalid, or
 *         the file name disagrees with the declared role name.
 */
export default async function main(pattern = patterns.UDR) {
  debug(`Looking for files matching '${pattern}'`)
  const files = await patternMatch(pattern)

  if (!files.length) {
    throw new Error(`No matching file could be found`)
  }

  return await Promise.all(
    files.map(async (file) => {
      debug(`\t${figures.pointer} found ${file}`)
      const name = path.basename(file, path.extname(file))
      const content = fs.readFileSync(file).toString('utf8')
      const replacing = await faunaClient().query(q.IsRole(q.Role(name)))

      debug(`${replacing ? 'Replacing' : 'Creating'} role '${name}' from file ${file}:`)

      // remove comments
      let query = content.replace(/#[^!].*$([\s]*)?/gm, '')

      // forbid simplified definitions (only available for UDFs)
      if (!query.match(/^[\s]*\{/)) {
        throw new Error(`Incorrect syntax used in role definition`)
      }

      // infer role name only if it has not been declared
      if (!query.includes('name:')) {
        query = query.replace('{', `{ name: "${name}", `)
      }

      // Fix: String.prototype.match returns null when the declared name is
      // in a form the regex does not recognize; guard it so we raise the
      // intended mismatch error instead of an opaque TypeError.
      const declared = query.match(/name:[\s]*(['"])(.*?)\1/)
      if (!declared || name !== declared[2]) {
        throw new Error(`File name does not match role name: ${name}`)
      }

      query = replacing ? `Update(Role('${name}'), ${query})` : `CreateRole(${query})`

      const data = runFQL(query)
      debug(`${logSymbols.success} role has been created/updated: ${data.name}`)

      return data
    })
  )
}
58 |
// CLI entry point: only runs when executed directly (not when imported
// by deploy.js / dev.js).
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  const [pattern] = process.argv.slice(2)

  ;(async () => {
    // --overwrite: wipe existing roles before deploying.
    if (process.env.BRAINYDUCK_OVERWRITE) {
      const { default: reset } = await import('./reset.js')
      await reset({ roles: true })
    }

    const refs = await main(pattern)

    console.log(
      `User-defined role(s) created or updated:`,
      refs.map((x) => x.name)
    )

    process.exit(0)
  })()
}
78 |
--------------------------------------------------------------------------------
/commands/deploy-schemas.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import fs from 'fs'
4 | import path from 'path'
5 | import _debug from 'debug'
6 | import figures from 'figures'
7 | import { fileURLToPath } from 'url'
8 | import { patternMatch, importSchema } from '../utils.js'
9 |
10 | const debug = _debug('brainyduck:deploy-schemas')
11 |
12 | const extendTypes = (schema) => {
13 | const regexp = /^[\s]*(?!#)[\s]*extend[\s]+type[\s]+([^\s]+)[\s]*\{([^\}]*)}/gm
14 |
15 | for (const [raw, name, content] of schema.matchAll(regexp)) {
16 | schema = schema
17 | .replace(raw, '')
18 | .replace(new RegExp(`(? {
31 | debug(`Looking for schemas matching '${pattern}'`)
32 |
33 | const files = (await patternMatch(Array.isArray(pattern) ? pattern : pattern.split(','))).map(
34 | (x) => path.resolve(x)
35 | )
36 |
37 | if (!files.length) {
38 | throw new Error(`No matching file could be found`)
39 | }
40 |
41 | const content = files.map((x) => {
42 | debug(`\t${figures.pointer} found ${x}`)
43 | return fs.readFileSync(x)
44 | })
45 |
46 | return content.join('\n')
47 | }
48 |
/**
 * Merge, normalize and push the GraphQL schemas matching `inputPath`.
 * With `puke: true` the merged schema text is returned instead of the
 * import response. On failure, the offending schema is printed before
 * the error is rethrown.
 */
export default async function main(inputPath = '**/[A-Z]*.(graphql|gql)', { override, puke } = {}) {
  debug(`called with:`, { inputPath, override, puke })

  const merged = await loadSchema(inputPath)
  const schema = extendTypes(merged)

  // Indent every line for readable debug/error output.
  const prettySchema = schema.replace(/^/gm, '\t')
  debug(`The resulting merged schema:\n${prettySchema}`)

  try {
    return await importSchema(schema, { override, puke })
  } catch (error) {
    console.error(`The schema below could not be pushed to remote:\n\n${prettySchema}`)

    throw error
  }
}
64 |
// CLI entry point: only runs when executed directly (not when imported
// by deploy.js / dev.js / build.js).
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  ;(async () => {
    const [inputPath] = process.argv.slice(2)

    // --overwrite: wipe collections and schemas before pushing.
    if (process.env.BRAINYDUCK_OVERWRITE) {
      const { default: reset } = await import('./reset.js')
      await reset({ collections: true, schemas: true })
    }

    console.log(await main(inputPath))
    process.exit(0)
  })()
}
78 |
--------------------------------------------------------------------------------
/commands/deploy.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import ora from 'ora'
4 | import _debug from 'debug'
5 | import { fileURLToPath } from 'node:url'
6 | import deployFunctions from './deploy-functions.js'
7 | import deployIndexes from './deploy-indexes.js'
8 | import deployRoles from './deploy-roles.js'
9 | import deploySchemas from './deploy-schemas.js'
10 | import { representData } from '../utils.js'
11 |
// Maps each deployable type to its deploy command. main() iterates
// these keys in insertion order, so this object also fixes the
// deployment order (schemas first).
const ALL_TYPES = {
  schemas: deploySchemas,
  indexes: deployIndexes,
  roles: deployRoles,
  functions: deployFunctions,
}
18 |
19 | const debug = _debug('brainyduck:deploy')
20 |
// Run the deploy command for one type with spinner feedback.
// "No matching file could be found" is treated as "nothing to deploy"
// (info), not as a failure; any other error fails the spinner and is
// rethrown to abort the overall deployment.
const deploy = async (type) => {
  const spinner = ora(`Deploying ${type}...`).start()

  try {
    const operation = ALL_TYPES[type]
    let data

    try {
      data = await operation()
    } catch (error) {
      if (error.message !== `No matching file could be found`) {
        throw error
      }

      return spinner.info(`No ${type} to deploy`)
    }

    if (!data || !data.length) {
      return spinner.fail(`Nothing was deployed of type '${type}'`)
    }

    spinner.succeed(`${type} have been deployed!`)
    // Schemas return raw text; other types are summarized.
    console.log(`${type}:`, type === 'schemas' ? data : representData(data), '\n')

    return data
  } catch (e) {
    spinner.fail(`${type} deployment has failed`)
    throw e
  }
}
51 |
/**
 * Deploy the requested types sequentially, honoring the canonical order
 * defined by ALL_TYPES. `types` is a map of type name -> truthy flag;
 * falsy entries (and unknown names) are skipped.
 */
export default async function main(types = ALL_TYPES) {
  const _types = Object.keys(types).filter((key) => types[key])
  console.log(`The following types are about to be deployed:`, _types)

  const requested = new Set(_types)

  for (const type of Object.keys(ALL_TYPES)) {
    if (requested.has(type)) {
      await deploy(type)
    } else {
      debug(`Skipping ${type}`)
    }
  }
}
65 |
// CLI entry point: only runs when executed directly.
// argv[2] is an optional comma-separated list of types (e.g.
// "schemas,functions"); absent, all types are deployed.
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  ;(async () => {
    const types =
      process.argv[2] && Object.fromEntries(process.argv[2].split(',').map((type) => [type, true]))

    // --overwrite: reset the selected types before deploying.
    if (process.env.BRAINYDUCK_OVERWRITE) {
      const { default: reset } = await import('./reset.js')
      await reset(types)
    }

    await main(types)

    console.log(`\n\nAll done! All deployments have been successful 🦆`)
    process.exit(0)
  })()
}
82 |
--------------------------------------------------------------------------------
/commands/dev.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
// Top-level error reporter for the watcher: log the failure but keep
// the process alive, except for a missing secret, which is fatal.
const scream = (e) => {
  console.error(e.stack || e)

  const fatal = e.message === `missing brainyduck's secret`
  if (fatal) {
    process.exit(1)
  }
}
10 |
11 | process.on('unhandledRejection', scream)
12 | process.on('uncaughtException', scream)
13 |
14 | import ora from 'ora'
15 | import path from 'path'
16 | import _debug from 'debug'
17 | import PQueue from 'p-queue'
18 | import chokidar from 'chokidar'
19 | import { execaSync } from 'execa'
20 | import { fileURLToPath } from 'url'
21 | import deployFunctions from './deploy-functions.js'
22 | import deployIndexes from './deploy-indexes.js'
23 | import deployRoles from './deploy-roles.js'
24 | import deploySchemas from './deploy-schemas.js'
25 | import build from './build.js'
26 | import { patterns, ignored } from '../utils.js'
27 |
28 | const debug = _debug('brainyduck:watcher')
29 |
30 | const queue = new PQueue({ autoStart: false, concurrency: 1 })
31 | const lock = {}
32 |
// Queue `file` under `type` while an operation for that type is already
// pending; processor() later drains the whole batch at once.
const block = (type, file) => {
  lock[type] = lock[type] || []
  lock[type].push(file)
}

// Release the batch for `type` (set to false so the next event starts a
// fresh batch).
const unblock = (type) => {
  lock[type] = false
}
41 |
// Run the user-supplied callback command after a batch of operations
// completes.
// Fix: the CLI mirrors `--callback` into BRAINYDUCK_CALLBACK (see
// optionParser's BRAINYDUCK prefix in cli.js), but this module only read
// CALLBACK, so the flag never took effect. Read the prefixed variable
// first, keeping CALLBACK as a backward-compatible fallback.
const runCallback = () => {
  const callback = process.env.BRAINYDUCK_CALLBACK || process.env.CALLBACK
  if (!callback) return

  console.log(`Running callback '${callback}':`)
  // NOTE(review): naive whitespace split — quoted arguments are not
  // supported; confirm that is acceptable for expected callbacks.
  const cmd = callback.split(' ')

  execaSync(cmd.shift(), cmd, {
    stdio: ['ignore', process.stdout, process.stderr],
    cwd: process.cwd(),
  })

  console.log('')
}
55 |
// Schedule `operation` for a changed file.
// - If a batch for this type is already pending (lock[type] truthy), just
//   append the file to that batch and return.
// - `cumulative` types batch every file event into a single run.
// The queued task drains the batch, releases the lock, and runs the
// operation with spinner feedback; failures are logged but never crash
// the watcher.
const processor = (type, operation, file, cumulative) => {
  if (lock[type]) {
    return block(type, file)
  }

  if (cumulative) {
    block(type, file)
  }

  queue.add(async () => {
    // Human-readable, sorted list of the batched files (or the single file).
    const filesList = (lock[type] || [file])
      .map((x) => path.relative(process.cwd(), x))
      .sort()
      .join(', ')

    unblock(type)

    if (!operation) {
      return debug(`Ignoring file(s) ${file} [${type}] (no operation defined)`)
    }

    const spinner = ora(`Processing ${filesList} [${type}]\n`).start()

    try {
      await operation(file)
      spinner.succeed(`Processed ${filesList} [${type}]`)
    } catch (e) {
      spinner.fail()
      console.error(e)
    }
  })
}
88 |
// Start a chokidar watcher for `pattern` and route add/change events
// through processor(). Resolves once the initial scan is complete
// ('ready'). With BRAINYDUCK_ONLY_CHANGES, files present at startup are
// ignored and only subsequent changes trigger `operation`.
const watch = (type, pattern, operation, cumulative) =>
  new Promise((resolve) => {
    const directory = process.cwd()

    if (process.env.BRAINYDUCK_ONLY_CHANGES) {
      debug(`Watching ${type} changes but ignoring initial files`)
    }

    chokidar
      .watch(pattern, {
        ignoreInitial: Boolean(process.env.BRAINYDUCK_ONLY_CHANGES),
        // Skip dotfiles plus the shared ignore list from utils.
        ignored: [/(^|[\/\\])\../, ...ignored],
        persistent: true,
        cwd: path.resolve(directory),
      })
      .on('error', (error) => debug(`error: ${error}`))
      .on('add', (file) => {
        file = path.join(directory, file)

        debug(`Watching ${file} [${type}]`)
        operation && processor(type, operation, file, cumulative)
      })
      .on('change', (file) => {
        file = path.join(directory, file)

        debug(`${file} has been changed [${type}]`)
        processor(type, operation, file, cumulative)
      })
      .on('ready', resolve)
  })
119 |
/**
 * Dev mode: watch all brainyduck file types and react to changes.
 * Schemas and documents rebuild the SDK (cumulatively batched); indexes,
 * roles and functions redeploy individually. With BRAINYDUCK_NO_WATCH
 * the process exits after the initial pass; otherwise it keeps watching
 * and shows an idle spinner between batches.
 */
export default async function main() {
  // TS files are tracked but have no operation (null) — changes are logged only.
  const ts = await watch('Typescript', patterns.TS, null, true)

  // const schema = await watch('Schema', patterns.SCHEMA, (file) =>
  //   generateTypes(file, file.replace(/(.gql|.graphql)$/, '$1.d.ts'))
  // )

  const schema = await watch(
    'Schema',
    patterns.SCHEMA,
    async () => {
      await build(patterns.SCHEMA, patterns.DOCUMENTS)
      await deploySchemas(patterns.SCHEMA, { override: true })
    },
    true
  )

  const index = await watch('Index', patterns.INDEX, deployIndexes)

  const udr = await watch('UDR', patterns.UDR, deployRoles)

  const udf = await watch('UDF', patterns.UDF, deployFunctions)

  const documents = await watch(
    'Document',
    patterns.DOCUMENTS,
    () => build(patterns.SCHEMA, patterns.DOCUMENTS),
    true
  )

  debug('Initial scan complete')

  if (process.env.BRAINYDUCK_NO_WATCH) {
    // One-shot mode: run the callback once the queue drains, then exit.
    queue.onIdle().then(() => {
      runCallback()

      console.log('All operations complete')
      process.exit(0)
    })
  } else {
    // `started` prevents running the callback before any work happened.
    let started = false

    const spinner = ora({
      text: `All done! Waiting for new file changes 🦆`,
      prefixText: '\n',
      spinner: 'bounce',
    })

    queue.on('active', () => {
      started = true
      spinner.stop()
    })

    queue.on('idle', () => {
      if (started) {
        runCallback()
      }

      spinner.start()
    })
  }

  // The queue was created with autoStart: false; kick it off now that
  // all watchers are registered.
  queue.start()
}
184 |
// Allow running this module directly: `node <module> [directory]`.
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  const [directory] = process.argv.slice(2)

  ;(async () => {
    if (process.env.BRAINYDUCK_OVERWRITE) {
      const { default: reset } = await import('./reset.js')
      await reset()
    }

    if (directory) {
      process.chdir(directory)
      debug(`Changed directory to ${process.cwd()}`)
    }

    await main()
  })().catch((error) => {
    // Fail loudly instead of leaving an unhandled promise rejection behind.
    console.error(error)
    process.exit(1)
  })
}
202 |
--------------------------------------------------------------------------------
/commands/export.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import fs from 'fs-extra'
4 | import path from 'path'
5 | import _debug from 'debug'
6 | import { fileURLToPath } from 'url'
7 | import { locateCache } from '../utils.js'
8 |
9 | const debug = _debug('brainyduck:export')
10 |
/**
 * Exports the cached sdk as a standalone node package.
 *
 * Copies the whole build cache into `destination` and writes a minimal
 * package.json next to it so the result can be published or moved around.
 *
 * @param {string} destination - empty (or nonexistent) directory to export into.
 * @returns {Promise<string>} the destination path, for convenience.
 * @throws when no destination is given, when no build exists yet, or when the
 *   destination already contains files.
 */
export default async function main(destination) {
  debug(`called with:`, { destination })

  if (!destination) {
    throw new Error(`Please provide a destination for your package`)
  }

  // A built sdk is a precondition: 'export' only repackages existing output.
  if (!fs.existsSync(locateCache(`sdk.mjs`))) {
    throw new Error(`Please run the 'build' command before running 'export'`)
  }

  const destinationOccupied = fs.existsSync(destination) && fs.readdirSync(destination).length > 0
  if (destinationOccupied) {
    throw new Error(`Destination '${destination}' already exists and is not empty`)
  }

  fs.copySync(locateCache(`.`), destination, {})

  const manifest = `{
  "name": "brainyduck-sdk",
  "version": "1.0.0",
  "type": "module",
  "exports": {
    ".": {
      "types": "./sdk.d.ts",
      "import": "./sdk.mjs",
      "require": "./sdk.cjs"
    }
  },
  "main": "./sdk.cjs",
  "types": "./sdk.d.ts",
  "bundleDependencies": true,
  "peerDependencies": {
    "graphql-request": "latest",
    "graphql-tag": "latest"
  }
}`

  fs.writeFileSync(path.join(destination, 'package.json'), manifest)
  debug(`The sdk has been exported at ${destination}`)

  return destination
}
56 |
// Allow running this module directly: `node export.js <destination>`.
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  const [destination] = process.argv.slice(2)

  ;(async () => {
    const location = await main(destination)

    console.log(`The package has been saved at ${location}`)
    process.exit(0)
  })().catch((error) => {
    // Surface failures (missing build, occupied destination, ...) with a
    // non-zero exit code instead of an unhandled rejection.
    console.error(error)
    process.exit(1)
  })
}
67 |
--------------------------------------------------------------------------------
/commands/pack.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import path from 'path'
4 | import _debug from 'debug'
5 | import { execaSync } from 'execa'
6 | import { fileURLToPath } from 'url'
7 | import { temporaryDirectory } from 'tempy'
8 | import exportIt from './export.js'
9 |
10 | const debug = _debug('brainyduck:pack')
11 |
/**
 * Creates a tarball of the built sdk in the current working directory.
 *
 * Exports the sdk into a temporary directory, runs `npm pack` there, and
 * returns the absolute path of the generated tarball.
 *
 * @returns {Promise<string>} absolute path to the generated .tgz file.
 */
export default async function main() {
  const destination = temporaryDirectory()
  debug(`packing at:`, { destination }, process.cwd())

  await exportIt(destination)

  // `npm pack` prints the generated tarball's filename on stdout; capture it
  // instead of hard-coding the name, so a version bump in the exported
  // package.json cannot silently break the returned path.
  const { stdout } = execaSync(`npm`, ['pack', '--pack-destination', process.cwd()], {
    cwd: destination,
    stdio: ['ignore', 'pipe', process.stderr],
  })

  const tarball = stdout.trim().split('\n').pop() || `brainyduck-sdk-1.0.0.tgz`
  return path.join(process.cwd(), tarball)
}
25 |
// Allow running this module directly: `node pack.js`.
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  ;(async () => {
    // Note: `main` takes no arguments; the previous code misleadingly
    // forwarded process.argv[2] to it.
    const location = await main()

    console.log(`The package has been compressed and saved at ${location}`)
    process.exit(0)
  })().catch((error) => {
    console.error(error)
    process.exit(1)
  })
}
36 |
--------------------------------------------------------------------------------
/commands/pull-schema.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import fs from 'fs'
4 | import path from 'path'
5 | import _debug from 'debug'
6 | import { print } from 'graphql'
7 | import { performance } from 'perf_hooks'
8 | import { fileURLToPath } from 'url'
9 | import { loadTypedefs, OPERATION_KINDS } from '@graphql-tools/load'
10 | import { UrlLoader } from '@graphql-tools/url-loader'
11 | import { mergeTypeDefs } from '@graphql-tools/merge'
12 | import { graphqlEndpoint, loadSecret } from '../utils.js'
13 |
14 | const debug = _debug('brainyduck:pull-schema')
15 |
// Shared configuration handed to both loadTypedefs and mergeTypeDefs below.
// NOTE(review): loadSecret() runs at module load time, so a missing/invalid
// secret surfaces on import rather than on first request — confirm intended.
const options = {
  loaders: [new UrlLoader()], // pull the schema over HTTP via introspection
  filterKinds: OPERATION_KINDS,
  sort: false,
  forceGraphQLImport: true,
  useSchemaDefinition: false,
  headers: {
    Authorization: `Bearer ${loadSecret()}`,
  },
}
26 |
/**
 * Pulls the remote graphql schema from `url` and returns it as a single
 * merged SDL string.
 *
 * @param {string} url - the graphql endpoint to introspect.
 * @returns {Promise<string>} merged schema SDL.
 * @throws when the endpoint has no (valid) schema to return.
 */
const loadSchema = async (url) => {
  debug(`Pulling the schema from '${url}'`)
  const typeDefs = await loadTypedefs(url, options).catch((err) => {
    // Translate fauna's "no Query type" error into actionable advice.
    if (
      err.message.includes('Must provide schema definition with query type or a type named Query.')
    ) {
      console.warn(`Please make sure you have pushed a valid schema before trying to pull it back.`)
      throw new Error(`Invalid schema retrieved: missing type Query`)
    }

    throw err
  })

  // Guard before dereferencing: the previous code logged `typeDefs.length`
  // first, which would throw a TypeError instead of the intended
  // 'no schema found' error whenever loadTypedefs resolved to nothing.
  if (!typeDefs || !typeDefs.length) {
    throw new Error('no schema found')
  }

  debug(`${typeDefs.length} typeDef(s) found`)

  const mergedDocuments = print(
    mergeTypeDefs(
      typeDefs.map((r) => r.document),
      options
    )
  )

  // mergeTypeDefs+print normally yield a string already; keep the defensive
  // re-print for the (documented) case where a DocumentNode comes back.
  return typeof mergedDocuments === 'string'
    ? mergedDocuments
    : mergedDocuments && print(mergedDocuments)
}
56 |
/**
 * Pulls the schema hosted in fauna and optionally stores it on disk.
 *
 * @param {string} [outputPath] - file to write the schema to; when omitted
 *   the schema is only returned.
 * @returns {Promise<string>} the merged SDL of the remote schema.
 */
export default async function main(outputPath) {
  debug(`called with:`, { outputPath })

  const startedAt = performance.now()
  const schema = await loadSchema(graphqlEndpoint.server)
  debug(`The call to fauna took ${performance.now() - startedAt} milliseconds.`)

  if (outputPath) {
    fs.writeFileSync(outputPath, schema)
    debug(`The schema has been stored at '${outputPath}'`)
  }

  return schema
}
70 |
// Allow running this module directly: `node pull-schema.js [output]`.
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  const [outputPath] = process.argv.slice(2)

  ;(async () => {
    const schema = await main(outputPath && path.resolve(outputPath))

    // With no output file, print the schema to stdout instead.
    if (!outputPath) {
      console.log(schema)
    }

    process.exit(0)
  })().catch((error) => {
    console.error(error)
    process.exit(1)
  })
}
84 |
--------------------------------------------------------------------------------
/commands/reset.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import fs from 'fs'
4 | import ora from 'ora'
5 | import path from 'path'
6 | import chalk from 'chalk'
7 | import { fileURLToPath } from 'node:url'
8 | import { runFQL, importSchema, representData, question } from '../utils.js'
9 |
// Every data type `reset` knows how to wipe, and the default selection when
// the caller passes none. Frozen so the shared default cannot be mutated by
// any caller.
const ALL_TYPES = Object.freeze({
  functions: true,
  indexes: true,
  roles: true,
  documents: true,
  collections: true,
  databases: true,
  schemas: true,
})
19 |
// Reads one of the bundled FQL scripts (e.g. `reset.roles.fql`) from
// ../scripts, resolved relative to this module rather than the cwd.
const readScript = (name) => {
  const location = new URL(path.join(`../scripts/`, name), import.meta.url)
  return fs.readFileSync(location, { encoding: 'utf8' })
}
22 |
/**
 * Warns about the upcoming destructive reset and asks for confirmation.
 *
 * @param {Record<string, boolean>} [types=ALL_TYPES] - the types about to be wiped.
 * @returns {Promise<boolean>} true when the user confirmed (or BRAINYDUCK_FORCE is set).
 */
const confirm = async (types = ALL_TYPES) => {
  const listOfTypes = chalk.red.bold(Object.keys(types).join(', '))

  console.warn(
    `\n\nYou are about to wipe out all the ${listOfTypes} from the database associated to the key you provided.`
  )
  console.warn(`This action is irreversible and might possibly affect production data.\n\n`)

  if (process.env.BRAINYDUCK_FORCE) {
    console.warn(`Not asking for confirmation because you are using forced mode`)
    return true
  }

  const answer = await question(
    chalk.bold(`Are you sure you want to delete all the ${listOfTypes}? [y/N] `)
  )

  // Accept 'Y' and stray whitespace too; previously only a bare lowercase 'y'
  // confirmed, silently aborting on 'Y'.
  return String(answer).trim().toLowerCase() === 'y'
}
42 |
// Runs the bundled `reset.<type>.fql` script against the database, reporting
// progress on a spinner. Returns the deleted records when anything was wiped.
const reset = (type) => {
  const progress = ora(`Wiping out ${type}...`).start()

  try {
    const { data } = runFQL(readScript(`reset.${type}.fql`))

    if (!data?.length) {
      return progress.succeed(`No data was deleted of type '${type}'`)
    }

    progress.succeed(`${type} cleared out`)
    const deleted = data.map((record) => record.data || record)
    console.log('deleted:', representData(deleted))

    return data
  } catch (error) {
    progress.fail(`${type} reset failed`)
    throw error
  }
}
62 |
/**
 * Wipes the selected data types from the database.
 *
 * @param {Record<string, boolean>} [types=ALL_TYPES] - map of type name to
 *   whether it should be wiped.
 * @throws rethrows any failure from the schema reset or the per-type scripts.
 */
export default async function main(types = ALL_TYPES) {
  const selected = Object.entries(types)
    .filter(([, enabled]) => enabled)
    .map(([name]) => name)
  console.log(`The following types are about to be deleted:`, selected)

  if (types.schemas) {
    const spinner = ora(`Wiping out the graphql schema...`).start()

    try {
      // Replace the hosted graphql schema with a placeholder enum.
      await importSchema(`enum Brainyduck { RESETTING }`)
      spinner.succeed(`Graphql schema has been reset.`)
    } catch (e) {
      spinner.fail()
      throw e
    }
  }

  for (const type of selected) {
    reset(type)
  }
}
83 |
// Allow running this module directly: `node reset.js [type1,type2,...]`.
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  ;(async () => {
    // e.g. `node reset.js roles,indexes` -> { roles: true, indexes: true };
    // with no argument, `types` stays falsy and the defaults kick in.
    const types =
      process.argv[2] && Object.fromEntries(process.argv[2].split(',').map((type) => [type, true]))

    if (!(await confirm(types))) {
      return console.log('Wise decision! 🧑🌾')
    }

    await main(types)

    console.log(`All reset operations have succeeded.`)
    process.exit(0)
  })().catch((error) => {
    // Surface failures with a non-zero exit code instead of an unhandled
    // promise rejection.
    console.error(error)
    process.exit(1)
  })
}
99 |
--------------------------------------------------------------------------------
/docs/.nojekyll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zvictor/brainyduck/35dc71063f83786dc97082d8583ea8872dd8377e/docs/.nojekyll
--------------------------------------------------------------------------------
/docs/CNAME:
--------------------------------------------------------------------------------
1 | duck.brainy.sh
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | ## Introduction
2 |
3 | Brainyduck helps you transition your backend to a top notch serverless environment while keeping the developer experience neat! 🌈🍦🐥
4 |
5 | Worry not about new and complex setup and deployment requisites: The graphql schemas you already have are all you need to build a world-class & reliable endpoint.
6 |
7 | Just run `npx brainyduck` on your schemas and the times in which you had to manually setup your backend will forever be gone! Never find yourself redefining types in multiple files, ever again. 🥹
8 |
9 | 
10 |
11 | ## Features
12 |
13 | #### Code generation
14 |
15 | - ⚡️ Auto generated APIs with small footprint.
16 | - 👮🏼 The generated code is written in TypeScript, with full support for types.
17 | - ⛰ Schemas are [expanded to provide basic CRUD](https://docs.fauna.com/fauna/current/api/graphql/schemas) automatically (_i.e. no need to define resolvers for basic operations!_).
18 | - 🔎 Validation of required and non-nullable fields against provided data.
19 |
20 | #### Backend (by Fauna)
21 |
22 | - 🦄 All the data persists on a [next-gen data backend](https://docs.fauna.com/fauna/current/introduction) 🤘.
23 | - 👨👩👦👦 Support for [relationships between documents](https://docs.fauna.com/fauna/current/learn/tutorials/graphql/relations/), within the schema definition.
24 | - 🔒 [Authentication and access control security](https://docs.fauna.com/fauna/current/security/) at the data level (including [Attribute-based access control (ABAC)](https://docs.fauna.com/fauna/current/security/abac)).
25 |
26 |
27 | #### The library
28 |
29 | - ✅ Well-tested.
30 | - 🐻 Easy to add to your new or existing projects.
31 | - 👀 Quite a few examples in the [./examples](https://github.com/zvictor/brainyduck/tree/master/examples) folder.
32 |
33 |
34 |
Read more
35 |
36 | Given a GraphQL schema looking anything like this:
37 |
38 | ```graphql
39 | type User {
40 | username: String! @unique
41 | }
42 |
43 | type Post {
44 | content: String!
45 | author: User!
46 | }
47 | ```
48 |
49 | Brainyduck will give you:
50 |
51 | 1. Your schema will be expanded to provide basic CRUD out of the box. Expect it to become something like this:
52 |
53 | ```graphql
54 | type Query {
55 | findPostByID(id: ID!): Post
56 | findUserByID(id: ID!): User
57 | }
58 |
59 | type Mutation {
60 | updateUser(id: ID!, data: UserInput!): User
61 | createUser(data: UserInput!): User!
62 | updatePost(id: ID!, data: PostInput!): Post
63 | deleteUser(id: ID!): User
64 | deletePost(id: ID!): Post
65 | createPost(data: PostInput!): Post!
66 | }
67 |
68 | type Post {
69 | author: User!
70 | _id: ID!
71 | content: String!
72 | title: String!
73 | }
74 |
75 | type User {
76 | _id: ID!
77 | username: String!
78 | }
79 |
80 | input PostInput {
81 | title: String!
82 | content: String!
83 | author: PostAuthorRelation
84 | }
85 |
86 | input UserInput {
87 | username: String!
88 | }
89 |
90 | # ... plus few other less important definitions such as relations and pagination
91 | ```
92 |
93 | 2. Do you like TypeScript? Your schema will also be exported as TS types.
94 |
95 | ```typescript
96 | export type Query = {
97 | __typename?: 'Query'
98 | /** Find a document from the collection of 'Post' by its id. */
99 | findPostByID?: Maybe
100 | /** Find a document from the collection of 'User' by its id. */
101 | findUserByID?: Maybe
102 | }
103 |
104 | export type Mutation = {
105 | __typename?: 'Mutation'
106 | /** Update an existing document in the collection of 'User' */
107 | updateUser?: Maybe
108 | /** Create a new document in the collection of 'User' */
109 | createUser: User
110 | /** Update an existing document in the collection of 'Post' */
111 | updatePost?: Maybe
112 | /** Delete an existing document in the collection of 'User' */
113 | deleteUser?: Maybe
114 | /** Delete an existing document in the collection of 'Post' */
115 | deletePost?: Maybe
116 | /** Create a new document in the collection of 'Post' */
117 | createPost: Post
118 | }
119 |
120 | export type Post = {
121 | __typename?: 'Post'
122 | author: User
123 | /** The document's ID. */
124 | _id: Scalars['ID']
125 | content: Scalars['String']
126 | title: Scalars['String']
127 | }
128 |
129 | export type User = {
130 | __typename?: 'User'
131 | /** The document's ID. */
132 | _id: Scalars['ID']
133 | username: Scalars['String']
134 | }
135 |
136 | // ... plus few other less important definitions such as relations and pagination
137 | ```
138 |
139 | 3. You will be able to abstract the GraphQL layer and make calls using a convenient API (with full autocomplete support!)
140 |
141 | ```typescript
142 | import brainyduck from 'brainyduck' // <-- automatically loads the SDK generated exclusively to your schema
143 |
144 | await brainyduck().createUser({ username: `rick-sanchez` }) // <-- TS autocomplete and type checking enabled!
145 | await brainyduck({ secret: 'different-access-token' }).createUser({ username: `morty-smith` }) // <-- Easily handle authentication and sessions by providing different credentials
146 |
147 | const { allUsers } = await brainyduck().allUsers()
148 |
149 | for (const user of allUsers.data) {
150 | console.log(user)
151 | }
152 |
153 | // output:
154 | //
155 | // { username: 'rick-sanchez' }
156 | // { username: 'morty-smith' }
157 | ```
158 |
159 | 4. The API can be used both on backend and frontend, as long as you are careful enough with your [secrets management](https://forums.fauna.com/t/do-i-need-a-backend-api-between-faunadb-and-my-app-what-are-the-use-cases-of-an-api/95/6?u=zvictor).
160 |
161 | **What else?**
162 |
163 | 1. Brainyduck stiches multiple graphql files together, so your codebase can embrace [modularization](https://github.com/zvictor/brainyduck/tree/master/examples/modularized).
164 | 2. Isn't basic CRUD enough? What about more complex custom resolvers? Brainyduck integrates well with [user-defined functions [UDF]](https://docs.fauna.com/fauna/current/api/graphql/functions), automatically keeping your functions in sync with fauna's backend.
165 |
166 |
167 | For more examples, please check our [examples directory](https://github.com/zvictor/brainyduck/tree/master/examples).
168 |
169 |
170 | 
171 |
172 | ## Getting started
173 |
174 | It takes only **3 steps to get started**:
175 |
176 | 1. Create a `.graphql` file defining your desired Graphql schema
177 | 2. Create or reuse a [fauna secret](https://github.com/zvictor/brainyduck/wiki/Fauna-secret)
178 | 3. In the same folder, run `npx brainyduck --secret <fauna-secret>`
179 |
180 | That's it! Now you can start importing and consuming your sdk with `import sdk from 'brainyduck'` 🐣🎉
181 |
182 | _Alternatively, you can:_
183 |
184 | - In any of our [examples](https://github.com/zvictor/brainyduck/tree/master/examples) folder, run `npx brainyduck --secret `
185 |
186 | | [Basic](https://github.com/zvictor/brainyduck/tree/master/examples/basic) | [Modularized](https://github.com/zvictor/brainyduck/tree/master/examples/modularized) | [with-UDF](https://github.com/zvictor/brainyduck/tree/master/examples/with-UDF) |
187 | | :---------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------: |
188 | | [](https://asciinema.org/a/361576) | [](https://asciinema.org/a/361562) | [](https://asciinema.org/a/361573) |
189 | | |
190 |
191 | 
192 |
193 | ## Installation
194 |
195 | You can install it globally, per project or just run it on demand:
196 |
197 | ```bash
198 | # npm, globally:
199 | $ npm install -g brainyduck
200 |
201 | # npm, project-only:
202 | $ npm i brainyduck -D
203 |
204 | # or run on demand:
205 | $ npx brainyduck
206 | ```
207 |
208 | 
209 |
210 | ## Usage
211 |
212 | ```markup
213 | Usage: brainyduck [options] [command]
214 |
215 | Options:
216 | -V, --version output the version number
217 | -s, --secret set Fauna's secret key, used to deploy data to your database (defaults to ).
218 | --domain FaunaDB server domain (defaults to ).
219 | --port Connection port (defaults to ).
220 | --graphql-domain Graphql server domain (defaults to ).
221 | --graphql-port Graphql connection port (defaults to ).
222 | --scheme Connection scheme (defaults to ).
223 | --overwrite wipe out data related to the command before its execution
224 | --no-operations-generation disable the auto-generated operations documents.
225 |   -f, --force                                          skip prompt confirmations (defaults to <false>).
226 |   --ignore <patterns>                                  set glob patterns to exclude matches (defaults to <default ignore patterns>).
227 | --no-watch disable the files watcher (only used in the dev command).
228 | --only-changes ignore initial files and watch changes ONLY (only used in the dev command).
229 | --callback run external command after every execution completion (only used in the dev command).
230 | --tsconfig use a custom tsconfig file for the sdk transpilation.
231 | --verbose run the command with verbose logging.
232 | --debug [port] run the command with debugging listening on [port].
233 | --debug-brk [port] run the command with debugging(-brk) listening on [port].
234 | -h, --help display help for command
235 |
236 | Commands:
237 | build [schemas-pattern] [documents-pattern] [output] code generator that creates an easily accessible API. Defaults: [schemas-pattern: **/[A-Z]*.(graphql|gql), documents-pattern: **/[a-z]*.(graphql|gql) output: ]
238 | export [destination] export the built module as an independent node package
239 | pack create a tarball from the built module
240 | dev [directory] build, deploy and watch for changes. Defaults: [directory: ]
241 | deploy [types] deploy the local folder to your database. Defaults: [types: schemas,functions,indexes,roles]
242 | deploy-schemas [pattern] push your schema to faunadb. Defaults: [pattern: **/*.(graphql|gql)]
243 | deploy-functions [pattern] upload your User-Defined Functions (UDF) to faunadb. Defaults: [pattern: **/*.udf]
244 | deploy-indexes [pattern] upload your User-Defined Indexes to faunadb. Defaults: [pattern: **/*.index]
245 | deploy-roles [pattern] upload your User-Defined Roles (UDR) to faunadb. Defaults: [pattern: **/*.role]
246 | pull-schema [output] load the schema hosted in faunadb. Defaults: [output: ]
247 | reset [types] wipe out all data in the database {BE CAREFUL!}. Defaults: [types: functions,indexes,roles,documents,collections,databases,schemas]
248 | help [command] display help for command
249 | ```
250 |
251 | 
252 |
253 | ## Commands
254 |
255 | Commands with the **operative** badge require a `--secret` value to be passed along and are designed to make changes to the database associated to the given key.
256 |
257 | Using a wrong secret can have unintended and possibly drastic effects on your data. So please **make sure you are using the right secret whenever running a command!**
258 | ### build
259 |
260 | Throw graphql schemas in and get a well typed api back. Simple like that!
261 |
262 | After running `build` you can add `import sdk from 'brainyduck'` statements in your code and run queries against your database directly.
263 |
264 | CLI:
265 | ```shell
266 | npx brainyduck build [schema-pattern] [documents-pattern] [output]
267 | ```
268 |
269 | Defaults:
270 | * _schema-pattern_: `**/[A-Z]*.(graphql|gql)`
271 | * _documents-pattern_: `**/[a-z]*.(graphql|gql)`
272 | * _output_: ``
273 |
274 | ### export
275 |
276 | Sometimes you want to have the sdk on its own node package, usually to increase portability or reusability.
277 |
278 | After running `export` you can `npm publish` it or send the files somewhere else.
279 |
280 | CLI:
281 | ```shell
282 | npx brainyduck export [destination]
283 | ```
284 | ### pack
285 |
286 | Create a tarball from the built module
287 |
288 | CLI:
289 | ```shell
290 | npx brainyduck pack
291 | ```
292 |
293 | ### dev
294 |
295 |
296 | [Builds](#build), [deploys](#deploy) (override mode), and watches for changes.
297 |
298 | This is usually the command you run when you are developing locally, **never the command you run against production**.
299 | You are recommended to create a secret in a new database, just for the dev environment, before running this command.
300 |
301 | CLI:
302 | ```shell
303 | npx brainyduck dev [directory]
304 | ```
305 |
306 | Defaults:
307 | * _directory_: ``
308 |
309 |
310 | ### deploy
311 |
312 |
313 | Deploys [schemas](#deploy-schemas) (merge mode), [functions](#deploy-functions), [indexes](#deploy-indexes), and [roles](#deploy-roles).
314 | This is usually the command you run when you have finished developing locally and want to ship to production. Just remember to use the right `--secret` value for that.
315 |
316 | CLI:
317 | ```shell
318 | npx brainyduck deploy [types]
319 | ```
320 |
321 | Defaults:
322 | * _types_: `schemas,functions,indexes,roles`
323 |
324 | ### deploy-schemas
325 |
326 |
327 | Deploys the selected schemas to your database, creating collections accordingly.
328 |
329 | CLI:
330 | ```shell
331 | npx brainyduck deploy-schemas [pattern]
332 | ```
333 |
334 | Defaults:
335 | * _pattern_: `**/*.(graphql|gql)`
336 |
337 | ### deploy-functions
338 |
339 |
340 | Deploys your [User-Defined Functions (UDF)](https://docs.fauna.com/fauna/current/build/fql/udfs).
341 |
342 | CLI:
343 | ```shell
344 | npx brainyduck deploy-functions [pattern]
345 | ```
346 |
347 | Defaults:
348 | * _pattern_: `**/*.udf`
349 |
350 | ### deploy-indexes
351 |
352 |
353 | Deploys your [User-Defined Indexes](https://docs.fauna.com/fauna/current/api/fql/indexes).
354 |
355 | CLI:
356 | ```shell
357 | npx brainyduck deploy-indexes [pattern]
358 | ```
359 |
360 | Defaults:
361 | * _pattern_: `**/*.index`
362 |
363 | ### deploy-roles
364 |
365 |
366 | Deploys your [User-Defined Roles (UDR)](https://docs.fauna.com/fauna/current/security/roles) to your database.
367 |
368 | CLI:
369 | ```shell
370 | npx brainyduck deploy-roles [pattern]
371 | ```
372 |
373 | Defaults:
374 | * _pattern_: `**/*.role`
375 |
376 | ### pull-schema
377 | Downloads the schema from Fauna and outputs the result.
378 | Useful only for debugging or inspecting purposes, otherwise used only internally.
379 |
380 | CLI:
381 | ```shell
382 | npx brainyduck pull-schema [output]
383 | ```
384 |
385 | Defaults:
386 | * _output_: ``
387 |
388 | ### reset
389 |
390 |
391 | The fastest way to restart or get rid of data you don't want to keep anymore.
392 |
393 | **BE CAREFUL, though, as the actions performed by `reset` are irreversible.** Please triple check your `--secret` before running this command!
394 |
395 | CLI:
396 | ```shell
397 | npx brainyduck reset [types]
398 | ```
399 |
400 | Defaults:
401 | * _types_: `functions,indexes,roles,documents,collections,databases,schemas`
402 |
403 | 
404 |
405 | ## CLI and Programmatic Access
406 |
407 | All commands can be accessed in multiple ways.
408 |
409 | Note that all CLI options have an equivalent environment variable you can set directly, as long as you follow the [constant case pattern](https://github.com/zvictor/brainyduck/blob/36f39d9b9e6c50654967b876767abdd905488b7c/cli.js#L18-L27) to do so.
410 |
411 | Fauna options start with `FAUNA_` and all other ones start with `BRAINYDUCK_`.
412 |
413 | E.g `--overwrite` becomes `BRAINYDUCK_OVERWRITE`; `--graphql-domain` becomes `FAUNA_GRAPHQL_DOMAIN`.
414 |
415 | ### Centralized CLI
416 |
417 | Just run `npx brainyduck [options]`.
418 |
419 | For more information, please check [usage](#usage) or run `npx brainyduck --help`.
420 |
421 | ### Programmatically
422 |
423 | Looking for fancy ways to automate your processes? You can import Brainyduck directly into your scripts, using the `import('brainyduck/')` pattern, like shown in the example below:
424 |
425 | ```ts
426 | import build from 'brainyduck/build'
427 |
428 | await build()
429 | ```
430 |
431 | ### Direct CLI
432 |
433 | You can access each command while skipping the CLI wrapper altogether.
434 |
435 | ```markup
436 | node ./node_modules/brainyduck/commands/ [...args]
437 | ```
438 |
439 | _The parameters of each script vary from file to file. You will need to [check the signature of each command](https://github.com/zvictor/brainyduck/tree/master/commands) on your own._
440 |
441 |
442 | 
443 |
444 | ## Bundling & Exporting
445 |
446 | Your SDK files will be cached at `./node_modules/brainyduck/.cache`.
447 |
448 | Most of the times you are developing you **don't need to worry about the location of those files as Brainyduck manages them for you** internally. Sometimes, however, (specially when bundling your projects) you might need to think on how to move them around and make sure that they stay available to your code regardless of changes in the environment.
449 |
450 | For such cases, there a few strategies you can choose from:
451 |
452 | ### rebuild
453 |
454 | It's okay to just rebuild your sdk in a new environment.
455 |
456 | ```Dockerfile
457 | FROM node
458 | ...
459 | ADD ./src .
460 | RUN npm install
461 | RUN npx brainyduck build
462 | ```
463 |
464 | ### clone
465 |
466 | The files in Brainyduck's cache are portable, meaning that you can just copy them around.
467 |
468 | _We wish all ducks could be cloned that easily!_ 🐣🧬🧑🔬
469 |
470 |
471 | ```Dockerfile
472 | ...
473 | FROM node
474 | ...
475 | ADD ./src .
476 | RUN npm install
477 | ADD ./node_modules/brainyduck/.cache ./node_modules/brainyduck/.cache
478 | ```
479 |
480 | 
481 |
482 | ## Contributing
483 |
484 | ### Coding Principles
485 | * Whenever possible the commands should be _non-operative_ (meaning that they can be run without altering the database they're working with) and require no secret to execute.
486 |
487 | * Separation of concerns: The CLI file is just a wrapper invoking each command file, and that separation must be clear.
488 |
489 | ### Debugging & Testing
490 |
491 | 1. When debugging Brainyduck, it's always a very good idea to run the commands using the `--verbose` (or `DEBUG=brainyduck:*`) flag.
492 | Please make sure you **have that included in your logs before you report any bug**.
493 |
494 | 2. The tests are easy to run and are an important diagnosis tool to understand what could be going on in your environment.
495 | Clone Brainyduck's repository and then run the tests in one of the possible ways:
496 |
497 | **Note: Make sure you use a secret of a DB you create exclusively for these tests, as Brainyduck will potentially wipe all its data out!**
498 |
499 | _Note: `TESTS_SECRET` needs to have `admin` role._
500 |
501 | ```haskell
502 | -- Run all tests:
503 | TESTS_SECRET=secret_for_an_exclusive_db ./tests/run-tests.sh
504 | ```
505 |
506 | ```haskell
507 | -- Run only tests of a specific command:
508 | TESTS_SECRET=secret_for_an_exclusive_db ./tests/run-tests.sh specs/.js
509 | ```
510 |
511 | You can also set `DEBUG=brainyduck:*` when running tests to get deeper insights.
512 |
513 | 
514 |
515 | ## Sponsors
516 |
517 |