├── .gitignore
├── .travis.yml
├── LICENSE
├── README.md
├── __test__
│   └── starfleet.test.js
├── bin
│   ├── createDockerCompose.js
│   ├── createDockerfile.js
│   ├── createFileStructure.js
│   ├── createGeneratedServer.js
│   ├── createResolvers.js
│   ├── createSDL.js
│   ├── deployDocker.js
│   ├── runDocker.js
│   └── starfleet.js
├── package-lock.json
└── package.json
/.gitignore:
--------------------------------------------------------------------------------
1 | *.swp
2 | *.swo
3 | data/
4 | node_modules/
5 | Dockerfile
6 | docker-compose.yml
7 | graphqlsrc/
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: false
2 |
3 | language: node_js
4 |
5 | node_js:
6 | - node
7 |
8 | script:
9 | - npm run test:coverage
10 |
11 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Andrew Shin, Justin Jang, Jason Yu, and Mychal Estalilla
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
6 |
7 | # Starfleet
8 |
9 | [npm](https://www.npmjs.com/)
10 |
11 | _**With one command, give your MongoDB project a high-performance GraphQL API!**_
12 | _**Interested in trying out GraphQL? Quickly convert all of your Mongoose schemas into viewable and editable GraphQL type definitions and resolvers.**_
13 |
14 | **Starfleet** is a command line tool that lets you easily harness the flexibility of MongoDB with the declarative power of GraphQL.
15 |
16 | [**GraphQL**](https://graphql.org/) is a popular and rapidly growing data query and manipulation language for APIs that eliminates issues common to RESTful services, such as over- and under-fetching of data. GraphQL is language agnostic and strongly typed. It even has built-in introspection - what more is there to love?
17 |
18 | [**MongoDB**](https://www.mongodb.com/) is one of the most popular NoSQL database management systems out there. Its document-oriented structure lends itself to high horizontal scalability while still letting you enforce strong data integrity. It is also open source and has a great community!
19 |
20 | GraphQL requires a lot of boilerplate code just to get started, whether you're starting from scratch or working from an existing codebase. Starfleet gives you a powerful and convenient way to onboard GraphQL. From the CLI you can create and test GraphQL services for any project that uses MongoDB with the Mongoose ODM. You can even use Starfleet to spin up your project in a **Docker** container! Sound good? Let's get started!
21 |
22 | ## Prerequisites
23 |
24 | You should have the file(s) containing the Mongoose models you'd like converted in one folder (default: /models). Starfleet will generate a '/graphqlsrc' directory containing all the boilerplate GraphQL pieces built from the files in that folder.
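If you're unsure what Starfleet expects, here is a minimal sketch of a Mongoose model file it can pick up from your models folder (the `Captain` model and its fields are hypothetical; export either a single compiled model or an object containing several models):

```
// models/captain.js - hypothetical example model
const mongoose = require('mongoose');

const captainSchema = new mongoose.Schema({
  name: { type: String, required: true },
  ship: String,
  rank: String
});

// export the compiled model so Starfleet can convert it;
// exporting an object of models (e.g. { Captain, Ship }) also works
module.exports = mongoose.model('Captain', captainSchema);
```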
25 |
26 | If you want to deploy your project using Docker, you must set up Docker beforehand (https://docs.docker.com/get-started/).
27 |
28 |
29 | ## CLI
30 |
31 | To get started, install Starfleet globally:
32 |
33 | ```
34 | npm install -g starfleet-command
35 | ```
36 |
37 | Then, navigate into your project directory and run:
38 |
39 | ```
40 | starfleet init
41 | ```
42 |
43 | The /graphqlsrc folder will be created in your current working directory:
44 |
45 | ```
46 | -graphqlsrc
47 |   -models
48 |     -starfleet-SDL.graphql
49 |   -resolvers
50 |     -starfleet-resolvers.js
51 | ```
52 | Additionally, a 'starfleet-server.js' file will be created in your current working directory. The SDL file and resolvers file (with default CRUD operations) are automatically imported into starfleet-server.js and used to initialize an [Apollo Server](https://www.apollographql.com/docs/apollo-server/). (If you don't already know about Apollo Server, it is a powerful GraphQL library that connects a GraphQL schema to an HTTP server in Node.js and ships with tools such as caching and performance monitoring. Visit Apollo's website for more information!)
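For reference, the generated starfleet-server.js looks roughly like the following (simplified here; the connection string and database name come from the answers you give during `starfleet init`):

```
const mongoose = require('mongoose');
const fs = require('fs');
const { ApolloServer } = require('apollo-server');

// SDL and resolvers generated by `starfleet init`
const typeDefs = fs.readFileSync('./graphqlsrc/models/starfleet-SDL.graphql', 'utf8');
const resolvers = require('./graphqlsrc/resolvers/starfleet-resolvers');

mongoose.connect('<your MongoDB URI>', { useNewUrlParser: true, useUnifiedTopology: true, dbName: '<your database name>' })
  .then(() => console.log('MongoDB successfully connected'))
  .catch(err => console.log('Error connecting to db: ', err));

const server = new ApolloServer({ typeDefs, resolvers });
server.listen().then(({ url }) => console.log('Server ready at ' + url));
```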
53 |
54 | If you want to try out your converted Mongoose schemas with GraphQL, install Apollo Server (and its graphql peer dependency) by running
55 |
56 | ```
57 | npm install apollo-server graphql
58 | ```
59 |
60 | Then add a start script to your package.json and run `npm start` to test it out in the GraphQL Playground.
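For example, a minimal `scripts` entry pointing `npm start` at the generated server might look like this (merge it into your existing package.json rather than replacing it):

```
{
  "scripts": {
    "start": "node starfleet-server.js"
  }
}
```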
61 |
62 | ## Deployment
63 |
64 | Starfleet lets you test your GraphQL project in a Docker container. Once you have Docker installed and running on your local machine, just run:
65 |
66 | ```
67 | starfleet deploy --docker
68 | ```
69 |
70 | A 'Dockerfile' and 'docker-compose-starfleet.yml' will be created in your current working directory and then immediately used to deploy your application with Docker, exposing the port you choose at the deploy prompt (default 4000). To terminate the created Docker container, just run:
71 |
72 | ```
73 | starfleet land --docker
74 | ```
75 |
76 | ## Additional
77 |
78 | All available commands and flags, along with additional information, can be viewed in the CLI by running:
79 |
80 | ```
81 | starfleet --help
82 | ```
83 |
84 | If at any time you'd like to delete the generated files, enter:
85 |
86 | ```
87 | starfleet cleanup
88 | ```
89 |
90 | We actively welcome pull requests as well as any feedback or feature requests.
91 |
92 | ## Built With
93 |
94 | * [GraphQL](https://graphql.org/)
95 | * [Apollo Server](https://www.apollographql.com/docs/apollo-server/)
96 | * [Docker](https://www.docker.com/)
97 |
98 |
99 | ## Contributors
100 |
101 | * **Andrew Shin** - (https://github.com/andrewsnapz)
102 | * **Jason Yu** - (https://github.com/json-yu)
103 | * **Justin Jang** - (https://github.com/justin1j)
104 | * **Mychal Estalilla** - (https://github.com/mychaI)
105 |
106 | ## License
107 |
108 | This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
109 |
--------------------------------------------------------------------------------
/__test__/starfleet.test.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const shell = require('shelljs');
3 | const find = require('find');
4 | const createFileStructure = require('../bin/createFileStructure');
5 | const { createGeneratedServer } = require('../bin/createGeneratedServer');
6 | const createDockerCompose = require('../bin/createDockerCompose');
7 | const createDockerfile = require('../bin/createDockerfile');
8 | const chalk = require('chalk');
9 |
10 | // NOTE: these tests do not properly clean up generated files/folders, and assertions inside the async callbacks may run after the test has already resolved
11 |
12 | //test to see if the file structure has been invoked and created:
13 | describe('starfleet tests:', () => {
14 | describe(chalk.blue.bold('starfleet init command'), () => {
15 | test(chalk.yellow('creates new file structure in working directory'), async() => {
16 | let result;
17 | await find.dir('graphqlsrc', dir => {
18 | if (dir.length === 2) {
19 | result = true;
20 | expect(result).toBe(true);
21 | }
22 | })
23 | await createFileStructure();
24 | await find.dir('graphqlsrc', dir => {
25 | if (dir.length === 2) {
26 | result = true;
27 | expect(result).toBe(true);
28 | shell.rm('-rf', dir);
29 | } else {
30 | result = false;
31 | expect(result).toBe(true);
32 | }
33 | })
34 | })
35 |
36 | test(chalk.yellow('create generated server'), async() => {
37 | let result;
38 | await fs.access('./starfleet-server.js', fs.constants.F_OK, (err) => {
39 | if (!err) {
40 | result = true;
41 | return expect(result).toBe(true);
42 | } else {
43 | result = true;
44 | createGeneratedServer("URL", "Practice");
45 | // the generator writes starfleet-server.js; remove it after the check
46 | shell.rm('-f', './starfleet-server.js');
47 | return expect(result).toBe(true);
48 | }
49 | })
50 | })
51 | })
52 |
53 | describe(chalk.blue.bold('starfleet deploy command'), () => {
54 | test(chalk.yellow('creation of docker file'), async() => {
55 | let result;
56 | await fs.access('./Dockerfile', fs.constants.F_OK, (err) => {
57 | if (!err) {
58 | result = true;
59 | return expect(result).toBe(true)
60 | } else {
61 | result = true;
62 | createDockerfile('Dockerfile', 4000);
63 | fs.unlinkSync('./Dockerfile');
64 | return expect(result).toBe(true);
65 | }
66 | })
67 | })
68 |
69 | test(chalk.yellow('creation of docker compose file'), async() => {
70 | let result;
71 | await fs.access('./docker-compose-starfleet.yml', fs.constants.F_OK, (err) => {
72 | if (!err) {
73 | result = true;
74 | return expect(result).toBe(true)
75 | } else {
76 | result = true;
77 | createDockerCompose('Test', 4000);
78 | fs.unlinkSync('./docker-compose-starfleet.yml')
79 | return expect(result).toBe(true)
80 | }
81 | })
82 | })
83 | })
84 | })
--------------------------------------------------------------------------------
/bin/createDockerCompose.js:
--------------------------------------------------------------------------------
1 | // Helper function used in starfleet.js; check subcommands sections of starfleet.js file
2 | const fs = require('fs'); // node file system
3 | const shell = require('shelljs'); // Unix shell commands for Node.js
4 | const chalk = require('chalk'); // Terminal string styling
5 |
6 | const createDockerCompose = (PROJECT_NAME, PORT) => {
7 |
8 |
9 | const filePath = `${process.cwd()}/docker-compose-starfleet.yml`; // create this file
10 | const text = // with this text being written
11 | `version: "2"
12 | services:
13 | app:
14 | container_name: ${PROJECT_NAME}
15 | restart: always
16 | build: .
17 | ports:
18 | - "${PORT}:${PORT}"
19 | links:
20 | - mongo
21 |
22 | mongo:
23 | container_name: mongo
24 | image: mongo
25 | volumes:
26 | - ./data:/data/db
27 | ports:
28 | - "27017:27017"
29 |
30 | admin-mongo:
31 | image: 0x59/admin-mongo:latest
32 | ports:
33 | - "8082:8082"
34 | environment:
35 | - PORT=8082
36 | - CONN_NAME=mongo
37 | - DB_HOST=mongo
38 | links:
39 | - mongo
40 | `
41 | shell.touch(filePath); // this method will create file with filepath variable
42 | fs.writeFile(filePath, text, err => {
43 | if (err) {
44 | console.log(chalk.red.bold('Error creating docker-compose-starfleet.yml'));
45 | throw err;
46 | }
47 | // only report success once the file has actually been written
48 | console.log(chalk.green('✔'), chalk.cyan.bold('Done! Your docker-compose-starfleet.yml file has been created and put into your working directory!'));
49 | });
50 | 
51 | };
52 |
53 | module.exports = createDockerCompose;
54 |
55 |
56 |
57 |
58 |
59 |
--------------------------------------------------------------------------------
/bin/createDockerfile.js:
--------------------------------------------------------------------------------
1 | const chalk = require("chalk"); //terminal string styling done right
2 | const shell = require("shelljs"); // portable unix shell commands for node.js
3 | const fs = require("fs");
4 |
5 | const createDockerfile = (PROJECT_NAME, PORT) => {
6 |
7 | const filePath = `${process.cwd()}/Dockerfile`
8 | const text = `FROM node:latest \n\nWORKDIR /usr/src/app/${PROJECT_NAME} \n\nCOPY package.json /usr/src/app/${PROJECT_NAME}/ \n\nRUN npm install \n\nCOPY . /usr/src/app/${PROJECT_NAME} \n\nEXPOSE ${PORT} \n\nENTRYPOINT ["node", "./starfleet-server.js"]`;
9 | shell.touch(filePath);
10 | fs.writeFile(filePath, text, (err) => {
11 | if (err) {
12 | throw err;
13 | } else {
14 | console.log(chalk.green('✔'), chalk.cyan.bold(`Done! Your docker file has been created and put into your working directory!`))
15 | return;
16 | }
17 | });
18 | }
19 |
20 | module.exports = createDockerfile;
21 |
--------------------------------------------------------------------------------
/bin/createFileStructure.js:
--------------------------------------------------------------------------------
1 | // Helper function used in starfleet.js; check subcommands sections of starfleet.js file
2 | const shell = require("shelljs"); // portable unix shell commands for node.js
3 |
4 | const createFileStructure = () => {
5 | // creates new src folder
6 | const srcText = `graphqlsrc`
7 | shell.mkdir(srcText);
8 |
9 | // creates file structure in the new src folder; folder names taken from text arr
10 | const text = ['models', 'resolvers'];
11 | text.forEach(element => {
12 | let filepath = `${process.cwd()}/graphqlsrc/${element}`
13 | shell.mkdir(filepath);
14 | })
15 | }
16 |
17 | module.exports = createFileStructure;
18 |
--------------------------------------------------------------------------------
/bin/createGeneratedServer.js:
--------------------------------------------------------------------------------
1 | const fs = require("fs");
2 | const chalk = require('chalk');
3 |
4 | function createGeneratedServer(mongoDBURI, mongoDBname) {
5 | const serverText = `
6 | const mongoose = require('mongoose');
7 | const fs = require('fs');
8 | const { ApolloServer } = require('apollo-server');
9 | const typeDefs = fs.readFileSync('./graphqlsrc/models/starfleet-SDL.graphql', 'utf8');
10 | const resolvers = require('./graphqlsrc/resolvers/starfleet-resolvers')
11 |
12 | const MONGO_URI = '${mongoDBURI}';
13 |
14 | mongoose.connect(MONGO_URI, {
15 | useNewUrlParser: true,
16 | useUnifiedTopology: true,
17 | dbName: '${mongoDBname}'
18 | })
19 | .then(() => console.log('MongoDB successfully connected'))
20 | .catch( err => console.log('Error connecting to db: ', err));
21 |
22 | const server = new ApolloServer({
23 | typeDefs,
24 | resolvers
25 | });
26 |
27 | server.listen().then(({ url }) => {
28 | console.log('🚀 Server ready at ' + url);
29 | });
30 | `
31 | fs.writeFile(`${process.cwd()}/starfleet-server.js`, serverText, err => {
32 | if (err) return console.log(err);
33 | console.log(chalk.green('✔'), chalk.cyan.bold('A GraphQL-ready starfleet-server.js file has been created and added to your current working directory!'))
34 | })
35 | }
36 |
37 | function createGeneratedDeployServer(mongoDBURI, mongoDBname) {
38 | const serverText = `
39 | const mongoose = require('mongoose');
40 | const fs = require('fs');
41 | const { ApolloServer } = require('apollo-server');
42 | const typeDefs = fs.readFileSync('./graphqlsrc/models/starfleet-SDL.graphql', 'utf8');
43 | const resolvers = require('./graphqlsrc/resolvers/starfleet-resolvers')
44 |
45 | const MONGO_URI = '${mongoDBURI}';
46 |
47 | mongoose.connect(MONGO_URI, {
48 | useNewUrlParser: true,
49 | useUnifiedTopology: true,
50 | dbName: '${mongoDBname}'
51 | })
52 | .then(() => console.log('MongoDB successfully connected'))
53 | .catch( err => console.log('Error connecting to db: ', err));
54 |
55 | const server = new ApolloServer({
56 | typeDefs,
57 | resolvers
58 | });
59 |
60 | server.listen().then(({ url }) => {
61 | console.log('🚀 Server ready at ' + url);
62 | });
63 | `
64 | fs.writeFile(`${process.cwd()}/starfleet-server.js`, serverText, err => {
65 | if (err) return console.log(err);
66 | console.log(chalk.green('✔'), chalk.cyan.bold('A GraphQL-ready starfleet-server.js file has been created and added to your current working directory!'))
67 | })
68 | }
69 |
70 | module.exports = {
71 | createGeneratedServer,
72 | createGeneratedDeployServer
73 | }
74 |
--------------------------------------------------------------------------------
/bin/createResolvers.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 |
3 | const importModel = (modelName, modelPath, filename) => {
4 | const dependencies = `const ${modelName} = require('${modelPath}');\n`;
5 | try {
6 | fs.writeFileSync(filename, dependencies, {flag: 'a'});
7 | } catch (err) {
8 | console.log('Error writing model imports: ', err);
9 | }
10 | };
11 |
12 | const insertModuleExports = filename => {
13 | const moduleExports = '\nmodule.exports = resolvers = {\n';
14 | try {
15 | fs.writeFileSync(filename, moduleExports, {flag: 'a'});
16 | } catch (err) {
17 | console.log('Error writing module exports: ', err);
18 | }
19 | }
20 |
21 | const startQueryBlock = filename => {
22 | const startQueryBlock = 'Query: {\n';
23 | try {
24 | fs.writeFileSync(filename, startQueryBlock, {flag: 'a'});
25 | } catch (err) {
26 | console.log('Error writing query block(s): ', err);
27 | }
28 | }
29 |
30 | const startMutationBlock = filename => {
31 | const startMutationBlock = 'Mutation: {\n';
32 | try {
33 | fs.writeFileSync(filename, startMutationBlock, {flag: 'a'});
34 | } catch (err) {
35 | console.log('Error writing startMutationBlock: ', err);
36 | }
37 | };
38 |
39 | const endResolverBlock = (filename, endResolverBlock) => {
40 | try {
41 | fs.writeFileSync(filename, endResolverBlock, {flag: 'a'});
42 | } catch (err) {
43 | console.log('Error writing endResolverBlock: ', err);
44 | }
45 | }
46 |
47 | const createQueryResolver = (modelName, modelPath, filename) => {
48 |
49 | const modelResolver = `
50 | ${modelName}ById: async (obj, args) => {
51 | const ${modelName.toLowerCase()} = await ${modelName}.findById(args._id);
52 | return ${modelName.toLowerCase()};
53 | },
54 | ${modelName}ByIds: async (obj, args) => {
55 | const ${modelName.toLowerCase()}_ids = await args._ids.map((id) => id);
56 | const ${modelName.toLowerCase()} = await ${modelName}.find({
57 | _id: {
58 | $in: ${modelName.toLowerCase()}_ids
59 | }
60 | });
61 | return ${modelName.toLowerCase()};
62 | },
63 | ${modelName}One: async (obj, args) => {
64 | for (key in args) {
65 | if (key === 'filter') {
66 | for (prop in args[key]) {
67 | const field = {};
68 | field[prop] = args[key][prop];
69 | const ${modelName.toLowerCase()} = await ${modelName}.findOne(field);
70 | return ${modelName.toLowerCase()};
71 | }
72 | }
73 | }
74 | },
75 | ${modelName}Many: async (obj, args) => {
76 | for (key in args) {
77 | if (key === 'filter') {
78 | for (prop in args[key]) {
79 | const field = {};
80 | field[prop] = args[key][prop];
81 | const ${modelName.toLowerCase()} = await ${modelName}.find(field);
82 | return ${modelName.toLowerCase()};
83 | }
84 | }
85 | }
86 | },
87 | `;
88 |
89 | try {
90 | fs.writeFileSync(filename, modelResolver, {flag: 'a'});
91 | } catch (err) {
92 | console.log('Error writing query resolvers: ', err);
93 | }
94 |
95 | }
96 |
97 | const createMutationResolver = (modelName, modelPath, filename) => {
98 |
99 | const modelResolver = `
100 | ${modelName}CreateOne: async (obj, args) => {
101 | const record = {};
102 | for (key in args) {
103 | const newModel = new ${modelName}(args[key]);
104 | const newDoc = await newModel.save();
105 | if (!newDoc) {
106 | throw new Error('error saving document');
107 | }
108 | record[key] = newDoc;
109 | return record;
110 | }
111 | },
112 | ${modelName}CreateMany: async (obj, args) => {
113 | const payload = {};
114 | const records = [];
115 | const recordIds = [];
116 | for (key in args) {
117 | if (key === 'records') {
118 | for (let i = 0; i < args[key].length; i++) {
119 | const newModel = new ${modelName}(args[key][i]);
120 | const newDoc = await newModel.save();
121 | if (!newDoc) {
122 | throw new Error('error saving document');
123 | }
124 | records.push(newDoc);
125 | recordIds.push(newDoc._id);
126 | };
127 | }
128 | };
129 | payload['records'] = records;
130 | payload['recordIds'] = recordIds;
131 | payload['createCount'] = records.length;
132 | return payload;
133 | },
134 | ${modelName}UpdateById: async (obj, args) => {
135 | for (key in args) {
136 | if (key === 'record') {
137 | const update = {};
138 | for (field in args[key]) {
139 | update[field] = args[key][field];
140 | }
141 | const updatedDoc = await ${modelName}.findByIdAndUpdate(args[key]._id, update, {useFindAndModify: false, new: true})
142 | .catch( err => console.log('No document found'));
143 | if (!updatedDoc) {
144 | throw new Error('error updating document, ensure MongoID is correct')
145 | }
146 | return { record: updatedDoc };
147 | }
148 | }
149 | },
150 | ${modelName}UpdateOne: async (obj, args) => {
151 | for (key in args) {
152 | if (key === 'filter') {
153 | const conditions = args[key];
154 | const updatedDoc = await ${modelName}.findOneAndUpdate(conditions, args.record, { useFindAndModify: false, new: true })
155 | .catch( err => console.log('No document found under given conditions'));
156 | if (!updatedDoc) {
157 | throw new Error('error finding and updating document, ensure filter conditions are correct');
158 | };
159 | return { record: updatedDoc };
160 | }
161 | }
162 | },
163 | ${modelName}RemoveById: async (obj, args) => {
164 | if (args.hasOwnProperty('_id')) {
165 | const removedDoc = await ${modelName}.findByIdAndRemove(args._id, { useFindAndModify: false })
166 | .catch( err => console.log('No document found'));
167 | if (!removedDoc) {
168 | throw new Error('error finding and removing document, ensure _id is correct');
169 | };
170 | return { record: removedDoc };
171 | } else {
172 | throw new Error('_id is required');
173 | }
174 | },
175 | ${modelName}RemoveOne: async (obj, args) => {
176 | for (key in args) {
177 | if (key === 'filter') {
178 | const field = {};
179 | for (prop in args[key]) {
180 | field[prop] = args[key][prop];
181 | }
182 | const removedDoc = await ${modelName}.findOneAndRemove(field, { useFindAndModify: false })
183 | .catch( err => console.log('Error finding and removing document'));
184 | if (!removedDoc) {
185 | throw new Error('Error finding and removing document, ensure filter conditions are correct')
186 | }
187 | return { record: removedDoc };
188 | }
189 | }
190 | },
191 | `;
192 |
193 | try {
194 | fs.writeFileSync(filename, modelResolver, {flag: 'a'});
195 | } catch (err) {
196 | console.log('Error writing mutation resolvers: ', err);
197 | }
198 | }
199 |
200 | const endResolver = filename => {
201 | try {
202 | fs.writeFileSync(filename, '}', {flag: 'a'});
203 | } catch (err) {
204 | console.log('Error writing end resolver block: ', err);
205 | }
206 | }
207 |
208 | module.exports = {
209 | importModel,
210 | startQueryBlock,
211 | startMutationBlock,
212 | createQueryResolver,
213 | createMutationResolver,
214 | endResolverBlock,
215 | insertModuleExports,
216 | };
217 |
--------------------------------------------------------------------------------
/bin/createSDL.js:
--------------------------------------------------------------------------------
1 | // Helper function used in starfleet.js; check subcommands sections of starfleet.js file
2 | const fs = require('fs');
3 | const chalk = require("chalk");
4 |
5 | const { composeWithMongoose } = require('graphql-compose-mongoose');
6 | const { schemaComposer } = require('graphql-compose');
7 | const { printSchema } = require('graphql');
8 |
9 | const createSDL = (model, modelName) => {
10 | // converts passed in mongoose schemas to graphql pieces
11 | const customizationOptions = {};
12 | const ModelTC = composeWithMongoose(model, customizationOptions);
13 |
14 | // adds basic CRUD operations to converted schema
15 | schemaComposer.Query.addFields({
16 | [modelName+"ById"] : ModelTC.getResolver('findById'),
17 | [modelName+"ByIds"] : ModelTC.getResolver('findByIds'),
18 | [modelName+"One"] : ModelTC.getResolver('findOne'),
19 | [modelName+"Many"] : ModelTC.getResolver('findMany'),
20 | // [modelName+"Count"] : ModelTC.getResolver('count'),
21 | // [modelName+"Connection"] : ModelTC.getResolver('connection'),
22 | // [modelName+"Pagination"] : ModelTC.getResolver('pagination'),
23 | });
24 |
25 | schemaComposer.Mutation.addFields({
26 | [modelName+"CreateOne"] : ModelTC.getResolver('createOne'),
27 | [modelName+"CreateMany"] : ModelTC.getResolver('createMany'),
28 | [modelName+"UpdateById"] : ModelTC.getResolver('updateById'),
29 | [modelName+"UpdateOne"] : ModelTC.getResolver('updateOne'),
30 | [modelName+"UpdateMany"] : ModelTC.getResolver('updateMany'),
31 | [modelName+"RemoveById"] : ModelTC.getResolver('removeById'),
32 | [modelName+"RemoveOne"] : ModelTC.getResolver('removeOne'),
33 | [modelName+"RemoveMany"] : ModelTC.getResolver('removeMany'),
34 | });
35 |
36 | // utilizes schemaComposer library's .buildSchema to add CRUD operations
37 | // this is different from graphql's native buildSchema(), which builds a schema from SDL with only default resolvers
38 | const graphqlSchemaObj = schemaComposer.buildSchema();
39 | // printSchema is graphQL's built in GraphQL to SDL converter
40 | const graphqlSDL = printSchema(graphqlSchemaObj, { commentDescriptions: true });
41 |
42 | // generates SDL file and writes to designated path
43 | fs.writeFile('./graphqlsrc/models/starfleet-SDL.graphql', graphqlSDL, err => {
44 | if (err) {
45 | return console.log(err);
46 | }
47 | console.log(chalk.green('✔'), chalk.cyan.bold('Done! Your GraphQL'), chalk.blue(modelName),chalk.cyan.bold('schema has been created and added to your'), chalk.blue('graphqlsrc'), chalk.cyan.bold('directory!'));
48 | });
49 |
50 | };
51 |
52 |
53 | module.exports = createSDL;
54 |
--------------------------------------------------------------------------------
/bin/deployDocker.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | const inquirer = require('inquirer');
3 | const createDockerfile = require('./createDockerfile.js');
4 | const createDockerCompose= require('./createDockerCompose.js');
5 | const { build, up } = require('./runDocker.js');
6 |
7 | const deployDocker = () => {
8 | const prompts = [
9 | {
10 | name: 'PROJECTNAME',
11 | message: 'Please enter a name for your project: ',
12 | type: 'input',
13 | default: 'gql-project'
14 | },
15 | {
16 | name: 'PORT',
17 | message: 'Please specify a port (press ENTER to accept default port 4000): ',
18 | type: 'number',
19 | default: 4000
20 | }
21 | ]
22 | inquirer.prompt(prompts)
23 | .then(async answers => {
24 | await createDockerfile(answers.PROJECTNAME, answers.PORT);
25 | await createDockerCompose(answers.PROJECTNAME, answers.PORT);
26 | await build();
27 | await up();
28 | })
29 | }
30 |
31 | deployDocker();
32 |
33 |
--------------------------------------------------------------------------------
/bin/runDocker.js:
--------------------------------------------------------------------------------
1 | const chalk = require('chalk');
2 | const { exec, spawn} = require('child_process');
3 |
4 | const build = () => {
5 | //const pwd = spawnSync('pwd', { encoding: 'utf-8' });
6 | const options = {
7 | encoding: 'utf-8'
8 | };
9 |
10 | const newBuild = spawn('docker-compose', ['-f','docker-compose-starfleet.yml','build'], options);
11 | newBuild.stdout.on('data', data => {
12 | console.log(`Building docker images: ${data}`);
13 | });
14 |
15 | newBuild.on('exit', (code, signal) => {
16 | console.log(chalk.green('✔'), 'Build process complete');
17 | console.log(chalk.cyan('--- Deploying fleet ---. Press'),chalk.yellow.bold('CTRL + C'),chalk.cyan.bold('to quit'));
18 | });
19 | };
20 |
21 | const up = () => {
22 | const options = {
23 | encoding: 'utf-8'
24 | };
25 |
26 | const newDeploy = spawn('docker-compose', ['-f','docker-compose-starfleet.yml','up','-d'], options);
27 |
28 | newDeploy.stdout.on('data', data => {
29 | console.log(`Deploying fleet: ${data}`);
30 | });
31 |
32 | newDeploy.on('exit', (code, signal) => {
33 | console.log('Deploy process status ' + `code ${code} and signal ${signal}`);
34 | });
35 |
36 | };
37 |
38 | const stop = () => {
39 | console.log('Stopping Starfleet containers')
40 | exec(`docker-compose -f docker-compose-starfleet.yml down`, {shell: '/bin/bash'}, (err, stdout, stderr) => {
41 | if (err) return console.log('Error stopping containers: ', err);
42 | console.log('Successfully landed')
43 | });
44 | };
45 |
46 |
47 | module.exports = {
48 | build,
49 | up,
50 | stop
51 | }
52 |
53 |
54 |
--------------------------------------------------------------------------------
/bin/starfleet.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | // shebang line needed for running starfleet commands (reference bin lines in package.json file)
3 |
4 | const program = require('commander');
5 | const fs = require('fs');
6 | const path = require('path');
7 | const inquirer = require('inquirer');
8 | const chalk = require('chalk');
9 |
10 | // Metadata
11 | const {
12 | version,
13 | description
14 | } = require('../package.json');
15 | 
16 | 
17 |
18 | // Subcommands
19 | const createSDL = require('./createSDL');
20 | const createFileStructure = require('./createFileStructure');
21 | const createDockerfile = require('./createDockerfile');
22 | const createDockerCompose= require('./createDockerCompose');
23 | const { createGeneratedServer, createGeneratedDeployServer } = require('./createGeneratedServer');
24 | const { build, up, stop } = require('./runDocker')
25 | const {
26 | importModel,
27 | startQueryBlock,
28 | startMutationBlock,
29 | createQueryResolver,
30 | createMutationResolver,
31 | endResolverBlock,
32 | insertModuleExports
33 | } = require('./createResolvers');
34 |
35 | program
36 | .version(version)
37 | .description(description)
38 |
39 | // "starfleet init" command for converting mongoose schema to gql pieces
40 | program
41 | .command('init')
42 | .alias('i')
43 | .description('Initializing GraphQL services')
44 | .action(() => {
45 |
46 | const srcPath = `${process.cwd()}/graphqlsrc`
47 |
48 | if(!fs.existsSync(srcPath)) {
49 | createFileStructure();
50 | } else {
51 | console.log('GraphQL structure already exists. Skipping...')
52 | }
53 |
54 | // questions used by inquirer to create variable inputs
55 | const questions = [
56 | {
57 | name: "USERINPUT",
58 | message: "Please enter the name of the folder where your schema is in:",
59 | type: "input",
60 | default: "models"
61 | },
62 | {
63 | name: "URI",
64 | message: "Please provide your MongoDB connection string (URI): ",
65 | type: "input",
66 | default: "mongodb://localhost:27017/"
67 | },
68 | {
69 | name: "DATABASENAME",
70 | message: "What is the name of your database?",
71 | type: "input",
72 | default: "starfleet"
73 | },
74 | ];
75 |
76 | // creates SDL file after reading from user-inputted models file path
77 | inquirer.prompt(questions)
78 | .then(answers => {
79 | const workdir = `${answers.USERINPUT}`
80 |
81 | fs.readdirSync('./' + workdir).forEach(file => {
82 | const filename = path.parse(`${process.cwd()}/${workdir}/${file}`).name
83 | // each file name is passed in to createSDL; will be the prefix for all corresponding GQL types and resolvers
84 | const model = require(`${process.cwd()}/${workdir}/${file}`);
85 |
86 | // if the model file is only exporting one model, it will hit the function if block
87 | if (typeof model === "function") {
88 | // no edge case for if provided model is incorrect function
89 | createSDL(model, filename);
90 | } else if (typeof model === 'object' && Object.entries(model).length !== 0) { // if the model file has multiple, it will be an object containing all the different schemas inside
91 | for (const key in model) {
92 | createSDL(model[key], key);
93 | }
94 | } else {
95 | console.log(chalk.red('Skipping SDL file creation. An invalid Mongoose model was provided. Please make sure that you are exporting your models correctly.'))
96 | }
97 | });
98 |
99 | // creates resolver file
100 | const resolve = () => {
101 | let startExports = true;
102 | let startQuery = true;
103 | let startMutation = true;
104 | const models = fs.readdirSync('./' + workdir);
105 |
106 | // 1. Import all Mongoose models
107 | models.forEach(file => {
108 | const filename = path.parse(`${process.cwd()}/${workdir}/${file}`).name;
109 | const model = require(`${process.cwd()}/${workdir}/${file}`);
110 | // if the model file is only exporting one model, it will hit the function if block
111 | if (typeof model === "function") {
112 | // no edge case for if provided model is incorrect function
113 | importModel(filename, `../../${workdir}/${file}`, generatedResolverFile);
114 | } else if (typeof model === 'object' && Object.entries(model).length !== 0) { // if the model file has multiple, it will be an object containing all the different schemas inside
115 | for (const key in model) {
116 | importModel(key, `../../${workdir}/${file}`, generatedResolverFile);
117 | }
118 | } else {
119 | console.log(chalk.red('Skipping resolver creation. An invalid Mongoose model was provided. Please make sure that you are exporting your models correctly.'))
120 | }
121 | });
122 |
123 | // 2. Create Query resolvers for each model
124 | models.forEach(file => {
125 | if (startExports) {
126 | insertModuleExports(generatedResolverFile);
127 | startExports = false;
128 | }
129 | if (startQuery) {
130 | startQueryBlock(generatedResolverFile);
131 | startQuery = false;
132 | }
133 | const filename = path.parse(`${process.cwd()}/${workdir}/${file}`).name;
134 | const model = require(`${process.cwd()}/${workdir}/${file}`);
135 | if (typeof model === "function") {
136 | // no edge case for if provided model is incorrect function
137 | createQueryResolver(filename, `${workdir}/${file}`, generatedResolverFile);
138 | } else if (typeof model === 'object' && Object.entries(model).length !== 0) { // if the model file has multiple, it will be an object containing all the different schemas inside
139 | for (const key in model) {
140 | createQueryResolver(key, `${workdir}/${file}`, generatedResolverFile);
141 | }
142 | } else {
143 | console.log(chalk.red('Skipping resolver creation. An invalid Mongoose model was provided. Please make sure that you are exporting your models correctly.'))
144 | }
145 | });
146 |
147 |
148 | // 3. Close Query Block
149 | endResolverBlock(generatedResolverFile, '},\n');
150 |
151 | // 4. Create Mutation resolvers for each model
152 | models.forEach(file => {
153 | if (startMutation) {
154 | startMutationBlock(generatedResolverFile);
155 | startMutation = false;
156 | }
157 | const filename = path.parse(`${process.cwd()}/${workdir}/${file}`).name;
158 | const model = require(`${process.cwd()}/${workdir}/${file}`);
159 | if (typeof model === "function") {
160 | // no edge case for if provided model is incorrect function
161 | createMutationResolver(filename, `${workdir}/${file}`, generatedResolverFile);
162 | } else if (typeof model === 'object' && Object.entries(model).length !== 0) { // if the model file has multiple, it will be an object containing all the different schemas inside
163 | for (const key in model) {
164 | createMutationResolver(key, `${workdir}/${file}`, generatedResolverFile);
165 | }
166 | } else {
167 | console.log(chalk.red('Skipping resolver creation. An invalid Mongoose model was provided. Please make sure that you are exporting your models correctly.'))
168 | }
169 | });
170 |
171 | // 5. Close Resolvers Block
172 | endResolverBlock(generatedResolverFile, '},\n');
173 | endResolverBlock(generatedResolverFile, '}');
174 | }
175 |
176 | const generatedResolverFile = `${process.cwd()}/graphqlsrc/resolvers/starfleet-resolvers.js`
177 | fs.access(generatedResolverFile, fs.constants.F_OK, err => {
178 | err ? resolve() : console.log(chalk.red('Skipping resolver file creation. Resolver file already exists in graphqlsrc directory. To generate a new resolver file, either manually delete starfleet-resolvers.js or run'), chalk.white('starfleet cleanup'), chalk.red('to remove all generated files'));
179 | });
180 |
181 | // creates server file
182 | createGeneratedServer(answers.URI, answers.DATABASENAME)
183 | })
184 | });
185 |
186 |
187 | // "starfleet deploy/d ['-d', '--docker']" command to deploy to docker"
188 | program
189 | .command('deploy')
190 | .alias('d')
191 | .description('Deploys newly created GQL service to docker')
192 | .option("-d, --docker", "deploy to docker")
193 | .action( () => {
194 | console.log('NOTE: You must have docker daemon running in order to deploy docker containers via starfleet');
195 | if (!process.argv[3]) {
196 | console.log(chalk.red('\nPlease enter a valid deployment option. See'),chalk.white('--help'), chalk.red(' for assistance\n'));
197 | return;
198 | }
199 | const env = process.argv[3].toLowerCase();
200 | if (env === '--docker' || env === '-d') {
201 |
202 | const prompts = [
203 | {
204 | name: 'PROJECTNAME',
205 | message: 'Please enter a name for your project: ',
206 | type: 'input',
207 | default: 'gql-project'
208 | },
209 | {
210 | name: 'PORT',
211 | message: 'Please specify a port (press ENTER to accept default port 4000): ',
212 | type: 'number',
213 | default: 4000
214 | }
215 | ]
216 |
217 | inquirer.prompt(prompts)
218 | .then( answers => {
219 | createGeneratedDeployServer('mongodb://mongo:27017/', 'starfleet');
220 | createDockerfile(answers.PROJECTNAME, answers.PORT);
221 | createDockerCompose(answers.PROJECTNAME, answers.PORT);
222 | build();
223 | up();
224 | });
225 | }
226 | else if (env === '--lambda' || env === '-l') console.log('deploying to lambda');
227 | });
228 |
229 | program
230 | .command('land')
231 | .alias('l')
232 | .description('Stops created docker container')
233 | .option('-d, --docker', 'terminate docker containers')
234 | .action(() => {
235 |
236 | if (!process.argv[3]) {
237 | console.log(chalk.red('\nPlease enter a valid deployment option. See'), chalk.white('--help'), chalk.red(' for assistance\n'));
238 | return;
239 | }
240 |
241 | fs.access('./docker-compose-starfleet.yml', fs.constants.F_OK, err => {
242 | if (err) return console.log('Missing file docker-compose-starfleet.yml, run command `starfleet deploy -d` to generate Docker containers');
243 | stop();
244 | });
245 |
246 | });
247 |
248 | program
249 | .command('cleanup')
250 | .alias('c')
251 | .description('Remove all generated folders & files from init command')
252 | .action(() => {
253 | const graphqlsrcDir = `${process.cwd()}/graphqlsrc`
254 | const modelsDir = `${process.cwd()}/graphqlsrc/models`
255 | const resolversDir = `${process.cwd()}/graphqlsrc/resolvers`
256 | const gqlFile = `${process.cwd()}/graphqlsrc/models/starfleet-SDL.graphql`;
257 | const resolversFile = `${process.cwd()}/graphqlsrc/resolvers/starfleet-resolvers.js`;
258 | const gqlServerFile = `${process.cwd()}/starfleet-server.js`
259 |
260 | fs.readdirSync(graphqlsrcDir).forEach(folder => {
261 | if (folder === 'models') {
262 | fs.unlinkSync(gqlFile)
263 | fs.rmdirSync(modelsDir)
264 | }
265 |
266 | if (folder === 'resolvers') {
267 | fs.unlinkSync(resolversFile)
268 | fs.rmdirSync(resolversDir)
269 | }
270 | })
271 |
272 | fs.rmdirSync(graphqlsrcDir)
273 | fs.unlinkSync(gqlServerFile)
274 | });
275 |
276 | program.parse(process.argv);
277 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "starfleet-command",
3 | "version": "0.1.4",
4 | "description": "Instantly generates GraphQL services from your MongoDB/mongoose schemas and Apollo server for testing, with the ability to deploy them to docker containers",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "jest",
8 | "test:coverage": "jest --coverage",
9 | "start": "node server.js",
10 | "dev": "nodemon server.js"
11 | },
12 | "repository": {
13 | "type": "git",
14 | "url": "git+https://github.com/Traversal-Labs/starfleet.git"
15 | },
16 | "keywords": [
17 | "GraphQL",
18 | "MongoDB",
19 | "mongoose",
20 | "NodeJS"
21 | ],
22 | "author": "Traversal Labs",
23 | "license": "MIT",
24 | "bugs": {
25 | "url": "https://github.com/Traversal-Labs/starfleet/issues"
26 | },
27 | "bin": {
28 | "starfleet": "./bin/starfleet.js"
29 | },
30 | "homepage": "https://github.com/Traversal-Labs/starfleet#readme",
31 | "dependencies": {
32 | "@shelf/jest-mongodb": "^1.1.3",
33 | "chalk": "^3.0.0",
34 | "commander": "^4.0.1",
35 | "docker": "^1.0.0",
36 | "docker-compose": "^0.23.1",
37 | "express": "^4.17.1",
38 | "express-graphql": "^0.9.0",
39 | "find": "^0.3.0",
40 | "graphql": "^14.5.8",
41 | "graphql-compose": "^7.6.1",
42 | "graphql-compose-connection": "^6.0.3",
43 | "graphql-compose-mongoose": "^7.3.0",
44 | "graphql-compose-pagination": "^6.0.3",
45 | "graphql-tools": "^4.0.6",
46 | "inquirer": "^7.0.0",
47 | "mongoose": "^5.8.1",
48 | "shelljs": "^0.8.3"
49 | },
50 | "devDependencies": {
51 | "jest": "^24.9.0",
52 | "nodemon": "^2.0.2"
53 | },
54 | "jest": {
55 | "testEnvironment": "node"
56 | }
57 | }
58 |
--------------------------------------------------------------------------------