├── .eslint-bin └── pre-commit-hook.js ├── .eslintrc.js ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE.md ├── PULL_REQUEST_TEMPLATE.md └── workflows │ └── build.yml ├── .gitignore ├── .npmignore ├── .releaserc.js ├── CHANGELOG.md ├── Dockerfile ├── LICENSE ├── README.md ├── SECURITY.md ├── assets ├── database-examples │ ├── meals │ │ ├── Dockerfile │ │ └── dump.sql │ └── movies │ │ ├── Dockerfile │ │ └── dump.sql └── logo-forestadmin.png ├── commitlint.config.js ├── context ├── application-context.js ├── index.js └── init.js ├── deserializers ├── application-token.js ├── environment.js ├── project.js └── user.js ├── docker-compose.yml ├── handlerbars ├── helpers │ ├── equal.js │ ├── indent.js │ ├── is-array.js │ ├── is-object.js │ ├── sum.js │ └── wrap-quotes.js ├── loader.js └── partials │ └── render-nested.js ├── husky.config.js ├── jest.config.js ├── lumber-generate.js ├── lumber-login.js ├── lumber-logout.js ├── lumber-update.js ├── lumber.js ├── package.json ├── serializers ├── application-token.js ├── environment.js ├── project.js └── user.js ├── services ├── analyzer │ ├── database-analyzer.js │ ├── mongo-collections-analyzer.js │ ├── mongo-embedded-analyzer.js │ ├── mongo-hasmany-analyzer.js │ ├── mongo-references-analyzer.js │ ├── mysql-table-constraints-getter.js │ ├── sequelize-column-type-getter.js │ ├── sequelize-default-value.js │ ├── sequelize-table-constraints-getter.js │ └── sequelize-tables-analyzer.js ├── api.js ├── application-token.js ├── authenticator.js ├── command-generate-config-getter.js ├── database.js ├── directory-existence-checker.js ├── dumper.js ├── error-handler.js ├── event-sender.js ├── key-generator.js ├── logger.js ├── oidc │ ├── authenticator.js │ └── error.js ├── project-creator.js ├── prompter │ ├── abstract-prompter.js │ ├── application-prompts.js │ ├── database-prompts.js │ ├── general-prompter.js │ ├── project-prompts.js │ ├── prompter-error.js │ └── user-prompts.js └── spinners.js ├── templates └── app │ ├── Dockerfile.hbs │ ├── app.hbs │ ├── config │ └── databases.hbs │ ├── docker-compose.hbs │ ├── dockerignore.hbs │ ├── env.hbs │ ├── forest │ └── collection.hbs │ ├── gitignore.hbs │ ├── middlewares │ ├── forestadmin.hbs │ └── welcome.hbs │ ├── models │ ├── index.hbs │ ├── mongo-model.hbs │ └── sequelize-model.hbs │ ├── public │ └── favicon.png │ ├── routes │ └── route.hbs │ ├── server.hbs │ └── views │ └── index.hbs ├── test-expected ├── mongo │ ├── db-analysis-output │ │ ├── complex-model-with-a-view.expected.json │ │ ├── deep-nested-fields.expected.json │ │ ├── hasmany.expected.json │ │ ├── many-nulls.expected.json │ │ ├── many-objectid-fields.expected.json │ │ ├── multiple-nested-array-of-objects-fields.expected.json │ │ ├── multiple-references-from-same-field.expected.json │ │ ├── nested-array-of-numbers-fields.expected.json │ │ ├── nested-array-of-objects-fields.expected.json │ │ ├── nested-object-fields.expected.json │ │ ├── simple.expected.json │ │ ├── sub-document-not-using-ids.expected.json │ │ ├── sub-document-using-ids.expected.json │ │ ├── sub-documents-ambiguous-ids.expected.json │ │ ├── sub-documents-not-using-ids.expected.json │ │ └── sub-documents-using-ids.expected.json │ └── dumper-output │ │ ├── databases.config.expected.js │ │ ├── deep-nested.expected.js │ │ ├── hasmany.expected.js │ │ ├── index.expected.js │ │ ├── nested-array-of-numbers.expected.js │ │ ├── nested-array-of-objects.expected.js │ │ ├── nested-object.expected.js │ │ ├── simple.expected.js │ │ ├── sub-document-not-using-ids.expected.js │ │ ├── 
sub-document-using-ids.expected.js │ │ ├── sub-documents-ambiguous-ids.expected.js │ │ ├── sub-documents-not-using-ids.expected.js │ │ └── sub-documents-using-ids.expected.js └── sequelize │ ├── constraints-getter-output │ ├── addresses.expected.js │ ├── customers.expected.js │ └── reviews.expected.js │ ├── db-analysis-output │ ├── addresses.expected.json │ ├── customers.expected.json │ ├── default_values.mssql.expected.js │ ├── default_values.mysql.expected.js │ ├── default_values.postgres.expected.js │ ├── doubleref.expected.json │ ├── duplicatedalias.expected.json │ ├── export.expected.json │ ├── only-foreign-keys-and-id.expected.json │ ├── owners.expected.json │ ├── parenthesis.expected.json │ ├── parenthesis_underscored.expected.json │ ├── parenthesis_underscored_true.expected.json │ ├── projects.expected.json │ ├── renderings.expected.json │ ├── rentals.expected.json │ └── users.expected.json │ └── dumper-output │ ├── addresses.expected.js │ ├── customers.expected.js │ ├── databases.config.expected.js │ ├── default-values.expected.js │ ├── env.darwin.expected │ ├── env.linux.expected │ ├── export.expected.js │ ├── export.expected.route.js │ ├── index.expected.js │ ├── only-foreign-keys-and-id.expected.js │ ├── owners.expected.js │ ├── parenthesis.expected.js │ ├── parenthesis_underscored.expected.js │ ├── parenthesis_underscored_true.expected.js │ ├── projects.expected.js │ ├── renderings.expected.js │ └── users.expected.js ├── test-fixtures ├── mongo │ ├── deep-nested-model.js │ ├── hasmany-model.js │ ├── many-nulls-model.js │ ├── many-objectid-fields-model.js │ ├── multiple-nested-array-of-objects-model.js │ ├── multiple-references-same-field-model.js │ ├── nested-array-of-numbers-model.js │ ├── nested-array-of-objects-model.js │ ├── nested-object-model.js │ ├── simple-model.js │ ├── sub-document-not-using-ids-model.js │ ├── sub-document-using-ids-model.js │ ├── sub-documents-ambiguous-ids-model.js │ ├── sub-documents-not-using-ids-model.js │ └── sub-documents-using-ids-model.js ├── mssql │ ├── addresses.sql │ ├── books.sql │ ├── cars.sql │ ├── customers.sql │ ├── default_values.sql │ ├── doubleref.sql │ ├── duplicatedalias.sql │ ├── only_foreign_keys_and_id.sql │ ├── owners.sql │ ├── parenthesis_table.sql │ ├── parenthesis_underscored_table.sql │ ├── projects.sql │ ├── rentals.sql │ ├── reviews.sql │ ├── sample_table.sql │ ├── underscored_no_fields.sql │ ├── user_books.sql │ └── users.sql ├── mysql │ ├── addresses.sql │ ├── books.sql │ ├── cars.sql │ ├── customers.sql │ ├── default_values.sql │ ├── doubleref.sql │ ├── duplicatedalias.sql │ ├── json.sql │ ├── only_foreign_keys_and_id.sql │ ├── owners.sql │ ├── parenthesis_table.sql │ ├── parenthesis_underscored_table.sql │ ├── projects.sql │ ├── rentals.sql │ ├── reviews.sql │ ├── sample_table.sql │ ├── underscored_no_fields.sql │ ├── user_books.sql │ └── users.sql └── postgres │ ├── addresses.sql │ ├── books.sql │ ├── cars.sql │ ├── customers.sql │ ├── default_values.sql │ ├── doubleref.sql │ ├── duplicatedalias.sql │ ├── employees.sql │ ├── json.sql │ ├── only_foreign_keys_and_id.sql │ ├── owners.sql │ ├── parenthesis_table.sql │ ├── parenthesis_underscored_table.sql │ ├── projects.sql │ ├── rentals.sql │ ├── reviews.sql │ ├── sample_table.sql │ ├── underscored_no_fields.sql │ ├── user_books.sql │ └── users.sql ├── test-utils ├── database-urls.js ├── mongo-helper.js ├── multiple-database-version-helper.js └── sequelize-helper.js ├── test ├── context │ ├── application-context.test.js │ └── init.test.js ├── deserializers │ └── 
application-token.unit.test.js ├── handlebars │ └── helpers │ │ └── wrap-quotes.unit.test.js ├── serializers │ └── application-token.unit.test.js ├── services │ ├── analyzer │ │ ├── mongo-collections-analyzer.test.js │ │ ├── mongo-embedded-analyzer.test.js │ │ └── sequelize-tables-analyzer.test.js │ ├── api.unit.test.js │ ├── application-token.unit.test.js │ ├── authenticator.unit.test.js │ ├── command-generate-config-getter.test.js │ ├── database-analyzer │ │ ├── database-analyzer-mongo.test.js │ │ └── database-analyzer-sequelize.test.js │ ├── database.unit.test.js │ ├── dumper │ │ ├── dumper-mongo.test.js │ │ ├── dumper-sequelize.test.js │ │ ├── dumper-sql.test.js │ │ ├── dumper.test.js │ │ └── dumper.unit.test.js │ ├── error-handler.unit.test.js │ ├── mysql-table-constraints-getter.test.js │ ├── oidc │ │ └── authenticator.test.js │ ├── prompter │ │ ├── application-prompts.test.js │ │ ├── database-prompts.test.js │ │ ├── general-prompter.test.js │ │ ├── project-prompts.test.js │ │ ├── prompt-utils.test.js │ │ └── user-prompts.test.js │ ├── sequelize-column-type-getter.test.js │ └── sequelize-table-constraints-getter.test.js ├── templates │ └── app │ │ └── server.test.js └── utils │ ├── errors.test.js │ ├── fields.test.js │ ├── mongo-primitive-type.test.js │ └── to-valid-package-name.test.js ├── utils ├── authenticator-helper.js ├── errors │ ├── application-token │ │ └── unable-to-create-application-token-error.js │ ├── database │ │ └── empty-database-error.js │ └── dumper │ │ ├── incompatible-liana-for-update-error.js │ │ └── invalid-lumber-project-structure-error.js ├── fields.js ├── lumber-error.js ├── messages.js ├── mongo-collections.js ├── mongo-primitive-type.js ├── regexs.js ├── strings.js ├── terminator.js └── to-valid-package-name.js └── yarn.lock /.eslint-bin/pre-commit-hook.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process'); 2 | const simpleGit = require('simple-git')(`${__dirname}/..`); 3 | 4 | let listFilesModified = []; 5 | 6 | function excludeNonCommitedFiles(file) { 7 | return file.index !== 'D' // NOTICE: Deleted files 8 | && file.index !== ' ' // NOTICE: Files not staged for commit 9 | && file.index !== '?'; // NOTICE: Untracked files 10 | } 11 | 12 | function getFilesModified(callback) { 13 | simpleGit.status((error, status) => { 14 | if (error) { 15 | // eslint-disable-next-line no-console 16 | console.error(error); 17 | process.exit(-1); 18 | } 19 | 20 | listFilesModified = status.files 21 | .filter(excludeNonCommitedFiles) 22 | .map((file) => { 23 | if (file.index === 'R') { 24 | return file.path.substring(file.path.indexOf(' -> ') + 4); 25 | } 26 | return file.path; 27 | }) 28 | .filter((file) => file.endsWith('.js')); 29 | 30 | callback(); 31 | }); 32 | } 33 | 34 | function runEslint(callback) { 35 | if (listFilesModified.length === 0) { 36 | return callback(0); 37 | } 38 | 39 | // eslint-disable-next-line no-console 40 | console.log(`[ESLint] Validating changed files:\n${listFilesModified.join('\n')}`); 41 | const eslintPath = `${__dirname}/../node_modules/.bin/eslint`; 42 | const cmd = spawn(eslintPath, listFilesModified, { stdio: 'inherit', shell: true }); 43 | 44 | return cmd.on('exit', (code) => callback(code)); 45 | } 46 | 47 | getFilesModified((error) => { 48 | if (error) { 49 | // eslint-disable-next-line no-console 50 | console.error(error); 51 | process.exit(-2); 52 | } 53 | 54 | runEslint((code) => process.exit(code)); 55 | }); 56 | 
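For illustration only, not a file from the repository: the pre-commit hook above works on simple-git's status() result, whose files entries carry an 'index' letter describing the staged state. Assuming entries shaped roughly as below (paths and letters are made up), only staged, tracked, non-deleted '.js' files reach ESLint, and a renamed file is linted under its new path.

// Hypothetical status.files content, for illustration only.
const exampleStatusFiles = [
  { path: 'services/logger.js', index: 'M' },         // staged modification -> linted
  { path: 'old-name.js -> new-name.js', index: 'R' }, // staged rename -> 'new-name.js' is linted
  { path: 'services/dumper.js', index: ' ' },         // changes not staged for commit -> skipped
  { path: 'notes.js', index: '?' },                   // untracked -> skipped
  { path: 'legacy.js', index: 'D' },                  // staged deletion -> skipped
  { path: 'README.md', index: 'M' },                  // staged, but not a .js file -> skipped
];
// Same filtering logic as the hook, inlined so the snippet runs standalone.
const lintable = exampleStatusFiles
  .filter((file) => !['D', ' ', '?'].includes(file.index))
  .map((file) => (file.index === 'R' ? file.path.split(' -> ')[1] : file.path))
  .filter((file) => file.endsWith('.js'));
// -> ['services/logger.js', 'new-name.js']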
-------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | root: true, 3 | extends: [ 4 | 'airbnb-base', 5 | 'plugin:jest/all', 6 | "plugin:sonarjs/recommended" 7 | ], 8 | plugins: [ 9 | 'sonarjs', 10 | ], 11 | env: { 12 | mocha: true, 13 | }, 14 | ignorePatterns: ["test-expected"], 15 | rules: { 16 | 'implicit-arrow-linebreak': 0, 17 | 'no-underscore-dangle': [ 18 | 'error', 19 | {'allow': ["_id"]}, 20 | ], 21 | 'import/no-extraneous-dependencies': [ 22 | 'error', 23 | { 24 | devDependencies: [ 25 | '.eslint-bin/*.js', 26 | 'test/**/*.js' 27 | ] 28 | } 29 | ], 30 | 'no-multiple-empty-lines': [ 31 | 'error', 32 | { 33 | max: 1, 34 | maxBOF: 0, 35 | maxEOF: 0 36 | } 37 | ], 38 | } 39 | }; 40 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | assets/* linguist-generated=true 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Expected behavior 2 | 3 | TODO: Please describe here the behavior you are expecting. 4 | 5 | ## Actual behavior 6 | 7 | TODO: What is the current behavior? 8 | 9 | ## Failure Logs 10 | 11 | TODO: Please include any relevant log snippets, if necessary. 12 | 13 | ## Context 14 | 15 | TODO: Please provide any relevant information about your setup. 16 | 17 | * Lumber Package Version: 18 | * Database Dialect: 19 | * Database Version: 20 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Definition of Done 2 | 3 | ### General 4 | 5 | - [ ] Write an explicit title for the Pull Request, following [Conventional Commits specification](https://www.conventionalcommits.org) 6 | - [ ] Test manually the implemented changes 7 | - [ ] Validate the code quality (indentation, syntax, style, simplicity, readability) 8 | - [ ] Ensure that Types have been updated according to your changes (if needed) 9 | 10 | ### Security 11 | 12 | - [ ] Consider the security impact of the changes made 13 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .env 3 | coverage/ 4 | node_modules/ 5 | test-output/ 6 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | assets 2 | .github 3 | .githooks 4 | .eslint-bin 5 | -------------------------------------------------------------------------------- /.releaserc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | branches: ['main'], 3 | plugins: [ 4 | '@semantic-release/commit-analyzer', 5 | '@semantic-release/release-notes-generator', 6 | '@semantic-release/changelog', 7 | '@semantic-release/npm', 8 | '@semantic-release/git', 9 | '@semantic-release/github', 10 | [ 11 | 'semantic-release-slack-bot', 12 | { 13 | markdownReleaseNotes: true, 14 | notifyOnSuccess: true, 15 | notifyOnFail: false, 16 | onSuccessTemplate: { 17 | text: "👾 $package_name@$npm_package_version 
has been released!", 18 | blocks: [{ 19 | type: 'section', 20 | text: { 21 | type: 'mrkdwn', 22 | text: '*New `$package_name` package released!*' 23 | } 24 | }, { 25 | type: 'context', 26 | elements: [{ 27 | type: 'mrkdwn', 28 | text: "👾 *Version:* <$repo_url/releases/tag/v$npm_package_version|$npm_package_version>" 29 | }] 30 | }, { 31 | type: 'divider', 32 | }], 33 | attachments: [{ 34 | blocks: [{ 35 | type: 'section', 36 | text: { 37 | type: 'mrkdwn', 38 | text: '*Changes* of version $release_notes', 39 | }, 40 | }], 41 | }], 42 | }, 43 | packageName: 'lumber-cli', 44 | } 45 | ], 46 | ], 47 | } 48 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:lts-jessie 2 | 3 | WORKDIR /usr/src/app 4 | RUN npm install -g lumber-cli -s 5 | 6 | VOLUME /usr/src/app 7 | 8 | EXPOSE $APPLICATION_PORT 9 | CMD lumber generate "${APPLICATION_NAME:-$APP_NAME}" \ 10 | -c "$DATABASE_URL" \ 11 | -S "${DATABASE_SSL:-false}" \ 12 | -s "$DATABASE_SCHEMA" \ 13 | -H "$APPLICATION_HOST" \ 14 | -p "$APPLICATION_PORT" \ 15 | --email "$FOREST_EMAIL" \ 16 | --token "$FOREST_TOKEN" && \ 17 | cd "${APPLICATION_NAME:-$APP_NAME}" && \ 18 | npm install -s 19 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2019 FOREST ADMIN INC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | 5 | To report a security vulnerability, please use the [Forest Admin security email](mailto:security@forestadmin.com). 6 | 7 | Our technical team will consider your request carefully. 8 | 9 | If the vulnerability report is accepted, Forest Admin will: 10 | - work on a fix of the current version with the highest priority, 11 | - let you know as soon as a new patched version is published. 
12 | -------------------------------------------------------------------------------- /assets/database-examples/meals/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM postgres:11-alpine 2 | ENV POSTGRES_USER=lumber 3 | ENV POSTGRES_PASSWORD=secret 4 | ENV POSTGRES_DB=meals 5 | RUN apk add --no-cache curl 6 | RUN curl https://raw.githubusercontent.com/ForestAdmin/lumber/devel/assets/database-examples/meals/dump.sql -o /docker-entrypoint-initdb.d/dump.sql 7 | -------------------------------------------------------------------------------- /assets/database-examples/movies/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM postgres:11-alpine 2 | ENV POSTGRES_USER=lumber 3 | ENV POSTGRES_PASSWORD=secret 4 | ENV POSTGRES_DB=movies 5 | RUN apk add --no-cache curl 6 | RUN curl https://raw.githubusercontent.com/ForestAdmin/lumber/devel/assets/database-examples/movies/dump.sql -o /docker-entrypoint-initdb.d/dump.sql 7 | -------------------------------------------------------------------------------- /assets/logo-forestadmin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ForestAdmin/lumber/d951e93ff075fc81c79142b296a953a9f06a330e/assets/logo-forestadmin.png -------------------------------------------------------------------------------- /commitlint.config.js: -------------------------------------------------------------------------------- 1 | // NOTICE: When a github "squash and merge" is performed, github add the PR link in the commit 2 | // message using the format ` (#)`. Github provide the target branch of the build, 3 | // so authorizing 4+5 = 9 characters more on main for the max header length should work 4 | // until we reach PR #99999. 5 | 6 | let maxLineLength = 100; 7 | 8 | const prExtrasChars = 9; 9 | 10 | const isPushEvent = process.env.GITHUB_EVENT_NAME === 'push'; 11 | 12 | if (isPushEvent) { 13 | maxLineLength += prExtrasChars; 14 | } 15 | 16 | module.exports = { 17 | extends: ['@commitlint/config-conventional'], 18 | rules: { 19 | 'header-max-length': [1, 'always', maxLineLength], 20 | 'body-max-line-length': [1, 'always', maxLineLength], 21 | 'footer-max-line-length': [1, 'always', maxLineLength], 22 | }, 23 | }; 24 | -------------------------------------------------------------------------------- /context/application-context.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @template TContext 3 | */ 4 | class ApplicationContext { 5 | constructor() { 6 | /** @type {TContext} */ 7 | // @ts-ignore 8 | this.context = {}; 9 | } 10 | 11 | /** 12 | * @static @private 13 | * @param {*} Class 14 | * @returns {string} 15 | */ 16 | static _getInstanceName(Class) { 17 | const className = Class.name; 18 | return className.charAt(0).toLowerCase() + className.slice(1); 19 | } 20 | 21 | /** 22 | * @param {(ApplicationContext) => void} servicesBuilder 23 | */ 24 | init(servicesBuilder) { 25 | if (!servicesBuilder) throw new Error('missing services builder'); 26 | 27 | servicesBuilder(this); 28 | } 29 | 30 | /** 31 | * @returns {TContext} 32 | */ 33 | inject() { 34 | return this.context; 35 | } 36 | 37 | /** 38 | * @param {*} Class 39 | * @param {boolean} [overrides] 40 | * @returns {this} 41 | */ 42 | addClass(Class, overrides) { 43 | if (overrides) throw new Error('overrides are forbidden in application-context. 
Use test-application-context.js'); 44 | 45 | const instanceName = ApplicationContext._getInstanceName(Class); 46 | if (this.context[instanceName]) throw new Error(`existing class instance ${instanceName} in context`); 47 | this.context[instanceName] = new Class(this.context); 48 | 49 | return this; 50 | } 51 | 52 | /** 53 | * @param {string} name 54 | * @param {*} instance 55 | * @returns {this} 56 | */ 57 | addInstance(name, instance) { 58 | if (this.context[name]) throw new Error(`existing instance { key: '${name}'} in context`); 59 | this.context[name] = instance; 60 | return this; 61 | } 62 | 63 | /** 64 | * @param {string} name 65 | * @param {(param: any) => void} work 66 | * @returns {this} 67 | */ 68 | with(name, work) { 69 | work(this.context[name]); 70 | return this; 71 | } 72 | 73 | /** 74 | * No differences with addInstance for the moment, but we want to distinguish calls for clarity. 75 | * @param {string} name 76 | * @param {*} value 77 | * @returns {this} 78 | */ 79 | addValue(name, value) { 80 | return this.addInstance(name, value); 81 | } 82 | 83 | /** 84 | * No differences with addInstance for the moment, but we want to distinguish calls for clarity. 85 | * @param {string} name 86 | * @param {Function} value 87 | * @returns {this} 88 | */ 89 | addFunction(name, value) { 90 | return this.addInstance(name, value); 91 | } 92 | } 93 | 94 | module.exports = ApplicationContext; 95 | -------------------------------------------------------------------------------- /context/index.js: -------------------------------------------------------------------------------- 1 | const ApplicationContext = require('./application-context'); 2 | 3 | /** @type {import('./application-context')} */ 4 | const context = new ApplicationContext(); 5 | 6 | module.exports = context; 7 | -------------------------------------------------------------------------------- /deserializers/application-token.js: -------------------------------------------------------------------------------- 1 | const JSONAPIDeserializer = require('jsonapi-serializer').Deserializer; 2 | 3 | /** 4 | * @typedef {{ 5 | * id: string; 6 | * name: string; 7 | * token: string; 8 | * }} ApplicationToken 9 | */ 10 | 11 | const applicationTokenDeserializer = new JSONAPIDeserializer({ 12 | keyForAttribute: 'camelCase', 13 | }); 14 | 15 | module.exports = applicationTokenDeserializer; 16 | -------------------------------------------------------------------------------- /deserializers/environment.js: -------------------------------------------------------------------------------- 1 | const JSONAPIDeserializer = require('jsonapi-serializer').Deserializer; 2 | 3 | module.exports = new JSONAPIDeserializer({ 4 | keyForAttribute: 'camelCase', 5 | projects: { 6 | valueForRelationship: (relationship, included) => included, 7 | }, 8 | }); 9 | -------------------------------------------------------------------------------- /deserializers/project.js: -------------------------------------------------------------------------------- 1 | const JSONAPIDeserializer = require('jsonapi-serializer').Deserializer; 2 | 3 | module.exports = new JSONAPIDeserializer({ 4 | keyForAttribute: 'camelCase', 5 | environments: { 6 | valueForRelationship: (relationship, included) => included, 7 | }, 8 | renderings: { 9 | valueForRelationship: (relationship, included) => included, 10 | }, 11 | }); 12 | -------------------------------------------------------------------------------- /deserializers/user.js: 
-------------------------------------------------------------------------------- 1 | const JSONAPIDeserializer = require('jsonapi-serializer').Deserializer; 2 | 3 | module.exports = new JSONAPIDeserializer({ 4 | keyForAttribute: 'camelCase', 5 | }); 6 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | MongoDB-MIN: 2 | image: mongo:3.2 3 | container_name : test_lumber_mongo_min 4 | ports: 5 | - "27015:27017" 6 | volumes: 7 | - ./data/mongodb/3.2:/var/lib/mongodb 8 | 9 | MongoDB-MAX: 10 | image: mongo:4.2 11 | container_name : test_lumber_mongo_max 12 | ports: 13 | - "27016:27017" 14 | volumes: 15 | - ./data/mongodb/4.2:/var/lib/mongodb 16 | 17 | PostgreSQL-MIN: 18 | image : postgres:9.4 19 | container_name : test_lumber_postgres_min 20 | ports : 21 | - "54368:5432" 22 | environment: 23 | - POSTGRES_DB=lumber-sequelize-test 24 | - POSTGRES_USER=forest 25 | - POSTGRES_PASSWORD=secret 26 | 27 | PostgreSQL-MAX: 28 | image : postgres:12.2 29 | container_name : test_lumber_postgres_max 30 | ports : 31 | - "54369:5432" 32 | environment: 33 | - POSTGRES_DB=lumber-sequelize-test 34 | - POSTGRES_USER=forest 35 | - POSTGRES_PASSWORD=secret 36 | 37 | MySQL-MIN: 38 | image: mysql:5.6 39 | container_name: test_lumber_mysql_min 40 | environment: 41 | MYSQL_ROOT_PASSWORD: secret 42 | MYSQL_DATABASE: lumber-sequelize-test 43 | MYSQL_USER: forest 44 | MYSQL_PASSWORD: secret 45 | ports: 46 | - "127.0.0.1:8998:3306" 47 | 48 | MySQL-MAX: 49 | image: mysql:8.0 50 | container_name: test_lumber_mysql_max 51 | environment: 52 | MYSQL_ROOT_PASSWORD: secret 53 | MYSQL_DATABASE: lumber-sequelize-test 54 | MYSQL_USER: forest 55 | MYSQL_PASSWORD: secret 56 | ports: 57 | - "127.0.0.1:8999:3306" 58 | 59 | MS-SQL-MIN: 60 | image: mcr.microsoft.com/mssql/server:2017-latest 61 | container_name: test_lumber_mssql_min 62 | environment: 63 | ACCEPT_EULA: "Y" 64 | SA_PASSWORD: "forest2019:" 65 | ports: 66 | - "1431:1433" 67 | 68 | MS-SQL-MAX: 69 | image: mcr.microsoft.com/mssql/server:2019-latest 70 | container_name: test_lumber_mssql_max 71 | environment: 72 | ACCEPT_EULA: "Y" 73 | SA_PASSWORD: "forest2019:" 74 | ports: 75 | - "1432:1433" 76 | -------------------------------------------------------------------------------- /handlerbars/helpers/equal.js: -------------------------------------------------------------------------------- 1 | const Handlebars = require('handlebars'); 2 | 3 | Handlebars.registerHelper('eq', (value1, value2) => value1 === value2); 4 | -------------------------------------------------------------------------------- /handlerbars/helpers/indent.js: -------------------------------------------------------------------------------- 1 | const Handlebars = require('handlebars'); 2 | 3 | Handlebars.registerHelper('indent', (level, value) => { 4 | return ' '.repeat(level * 2) + value; 5 | }); 6 | -------------------------------------------------------------------------------- /handlerbars/helpers/is-array.js: -------------------------------------------------------------------------------- 1 | const Handlebars = require('handlebars'); 2 | 3 | Handlebars.registerHelper('isArray', (value) => { 4 | return Array.isArray(value); 5 | }); 6 | -------------------------------------------------------------------------------- /handlerbars/helpers/is-object.js: -------------------------------------------------------------------------------- 1 | const Handlebars = require('handlebars'); 2 | 
3 | Handlebars.registerHelper('isObject', (value) => { 4 | return typeof value === 'object'; 5 | }); 6 | -------------------------------------------------------------------------------- /handlerbars/helpers/sum.js: -------------------------------------------------------------------------------- 1 | const Handlebars = require('handlebars'); 2 | 3 | Handlebars.registerHelper('sum', (value1, value2) => { 4 | return value1 + value2; 5 | }); 6 | -------------------------------------------------------------------------------- /handlerbars/helpers/wrap-quotes.js: -------------------------------------------------------------------------------- 1 | const Handlebars = require('handlebars'); 2 | 3 | const wrapQuotes = (value1) => `'${value1}'`; 4 | 5 | Handlebars.registerHelper('wq', wrapQuotes); 6 | 7 | module.exports = wrapQuotes; 8 | -------------------------------------------------------------------------------- /handlerbars/loader.js: -------------------------------------------------------------------------------- 1 | // Notice: load partials 2 | require('./partials/render-nested'); 3 | 4 | // Notice: load helpers 5 | require('./helpers/equal'); 6 | require('./helpers/indent'); 7 | require('./helpers/is-array'); 8 | require('./helpers/is-object'); 9 | require('./helpers/sum'); 10 | require('./helpers/wrap-quotes'); 11 | -------------------------------------------------------------------------------- /handlerbars/partials/render-nested.js: -------------------------------------------------------------------------------- 1 | const Handlebars = require('handlebars'); 2 | 3 | Handlebars.registerPartial( 4 | 'renderNested', 5 | 6 | ` 7 | {{~#if (isArray type)}} 8 | [{{>renderNested type=type.[0] level=level}}] 9 | {{~else if (isObject type)}} 10 | { 11 | {{#each type}} 12 | {{#if (eq @key '_id')}} 13 | {{#if (eq this 'ambiguous')}} 14 | {{indent (sum ../level 1) '//'}} {{@key}}: false, Ambiguous usage of _ids, we could not detect if subDocuments use _id or not. 
15 | {{else if (eq this false)}} 16 | {{indent (sum ../level 1) @key}}: {{this}}, 17 | {{/if}} 18 | {{else}} 19 | {{indent (sum ../level 1) (wq @key)}}: {{>renderNested type=this level=(sum ../level 1)}}, 20 | {{/if}} 21 | {{/each}} 22 | {{indent level '}'}} 23 | {{else}} 24 | {{type}} 25 | {{~/if}} 26 | `, 27 | ); 28 | -------------------------------------------------------------------------------- /husky.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | hooks: { 3 | 'pre-commit': 'node ./.eslint-bin/pre-commit-hook.js', 4 | 'commit-msg': 'commitlint -E HUSKY_GIT_PARAMS', 5 | }, 6 | }; 7 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | collectCoverageFrom: [ 3 | '**/*.{js,ts,tsx}', 4 | ], 5 | 6 | // An array of regexp pattern strings used to skip coverage collection 7 | coveragePathIgnorePatterns: [ 8 | '/coverage', 9 | '/.eslint-bin/', 10 | '/node_modules/', 11 | '/test/', 12 | '/test-*', 13 | '/*.config.js', 14 | ], 15 | 16 | // The test environment that will be used for testing 17 | testEnvironment: 'node', 18 | }; 19 | -------------------------------------------------------------------------------- /lumber-login.js: -------------------------------------------------------------------------------- 1 | const program = require('commander'); 2 | const inquirer = require('inquirer'); 3 | const context = require('./context'); 4 | const initContext = require('./context/init'); 5 | const { EMAIL_REGEX } = require('./utils/regexs'); 6 | 7 | initContext(context); 8 | 9 | const { 10 | logger, authenticator, oidcAuthenticator, errorHandler, applicationTokenService, 11 | } = context.inject(); 12 | 13 | if (!logger) throw new Error('Missing dependency logger'); 14 | if (!authenticator) throw new Error('Missing dependency authenticator'); 15 | if (!errorHandler) throw new Error('Missing dependency errorHandler'); 16 | if (!applicationTokenService) throw new Error('Missing dependency applicationTokenService'); 17 | 18 | program 19 | .description('Log into Forest Admin API') 20 | .option('-e, --email ', 'Your Forest Admin account email') 21 | .option('-P, --password ', 'Your Forest Admin account password (ignored if token is set)') 22 | .option('-t, --token ', 'Your Forest Admin account token (replaces password)') 23 | .parse(process.argv); 24 | 25 | (async () => { 26 | let { email, token } = program; 27 | const { password } = program; 28 | 29 | if (!token && !password) { 30 | const sessionToken = await oidcAuthenticator.authenticate(); 31 | token = await applicationTokenService.generateApplicationToken(sessionToken); 32 | } else { 33 | if (!email) { 34 | ({ email } = await inquirer.prompt([{ 35 | type: 'input', 36 | name: 'email', 37 | message: 'What\'s your email address?', 38 | validate: (input) => { 39 | if (EMAIL_REGEX.test(input)) { return true; } 40 | return input ? 
'Invalid email' : 'Please enter your email address.'; 41 | }, 42 | }])); 43 | } 44 | 45 | token = await authenticator.loginWithEmailOrTokenArgv({ ...program, email }); 46 | } 47 | 48 | authenticator.saveToken(token); 49 | logger.success('Login successful'); 50 | process.exit(0); 51 | })().catch(async (error) => { 52 | await errorHandler.handle(error); 53 | }); 54 | -------------------------------------------------------------------------------- /lumber-logout.js: -------------------------------------------------------------------------------- 1 | const program = require('commander'); 2 | const context = require('./context'); 3 | const initContext = require('./context/init'); 4 | 5 | initContext(context); 6 | 7 | const { logger, authenticator } = context.inject(); 8 | 9 | if (!logger) throw new Error('Missing dependency logger'); 10 | if (!authenticator) throw new Error('Missing dependency authenticator'); 11 | 12 | program 13 | .description('Log out from Forest Admin API') 14 | .parse(process.argv); 15 | 16 | (async () => { 17 | const wasLoggedIn = await authenticator.logout(); 18 | if (wasLoggedIn) { 19 | logger.success('Logout successful'); 20 | } 21 | process.exit(0); 22 | })().catch(async (error) => { 23 | logger.error(error); 24 | process.exit(1); 25 | }); 26 | -------------------------------------------------------------------------------- /lumber.js: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env node 2 | 3 | // __ _____ _____ _____ _____ _____ 4 | // | | | | | | __ | __| __ | 5 | // | |__| | | | | | __ -| __| -| 6 | // |_____|_____|_|_|_|_____|_____|__|__| 7 | 8 | const program = require('commander'); 9 | const packagejson = require('./package.json'); 10 | const context = require('./context'); 11 | const initContext = require('./context/init'); 12 | 13 | initContext(context); 14 | 15 | program 16 | .version(packagejson.version) 17 | .command('generate <appName>', 'generate a backend application with an ORM/ODM configured') 18 | .command('login', 'log into Forest Admin API') 19 | .command('logout', 'log out from Forest Admin API') 20 | .command('update', 'update your project by generating files that do not currently exist') 21 | .parse(process.argv); 22 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "lumber-cli", 3 | "description": "Create your Forest Admin API in minutes. 
Admin API backend based on a database schema", 4 | "version": "4.2.0", 5 | "main": "lumber.js", 6 | "scripts": { 7 | "lint": "./node_modules/eslint/bin/eslint.js ./*.js .eslint-bin deserializers serializers services test utils", 8 | "test": "jest --runInBand", 9 | "test:coverage": "jest --runInBand --coverage" 10 | }, 11 | "bin": { 12 | "lumber": "lumber.js" 13 | }, 14 | "engines": { 15 | "node": ">= 10" 16 | }, 17 | "author": "Sandro Munda", 18 | "contributors": [ 19 | "Vincent Molinié ", 20 | "Valentin Lamatte ", 21 | "Arnaud Besnier ", 22 | "Guillaume Gautreau " 23 | ], 24 | "repository": "github:ForestAdmin/lumber", 25 | "license": "MIT", 26 | "dependencies": { 27 | "atob": "^2.1.2", 28 | "bluebird": "^3.4.6", 29 | "chalk": "^1.1.3", 30 | "commander": "^2.9.0", 31 | "dotenv": "^6.0.0", 32 | "handlebars": "4.7.6", 33 | "inquirer": "^6.2.0", 34 | "jsonapi-serializer": "^3.4.1", 35 | "lodash": "4.17.19", 36 | "mkdirp": "^1.0.4", 37 | "mongodb": "3.6.3", 38 | "mysql2": "2.2.5", 39 | "open": "^7.3.0", 40 | "openid-client": "^4.2.1", 41 | "pg": "8.2.1", 42 | "pluralize": "^8.0.0", 43 | "saslprep": "1.0.3", 44 | "sequelize": "5.22.0", 45 | "spinnies": "0.5.0", 46 | "superagent": "5.1.0", 47 | "tedious": "6.4.0", 48 | "validate-npm-package-name": "3.0.0" 49 | }, 50 | "devDependencies": { 51 | "@commitlint/cli": "11.0.0", 52 | "@commitlint/config-conventional": "11.0.0", 53 | "@semantic-release/changelog": "5.0.1", 54 | "@semantic-release/git": "9.0.0", 55 | "@types/jest": "^26.0.15", 56 | "eslint": "6.7.2", 57 | "eslint-config-airbnb-base": "14.0.0", 58 | "eslint-plugin-import": "2.18.2", 59 | "eslint-plugin-jest": "23.1.1", 60 | "eslint-plugin-sonarjs": "0.5.0", 61 | "git-hooks": "1.1.10", 62 | "husky": "4.2.3", 63 | "jest": "26.6.3", 64 | "rimraf": "3.0.0", 65 | "semantic-release": "17.4.2", 66 | "semantic-release-slack-bot": "1.6.2", 67 | "simple-git": "1.126.0", 68 | "sinon": "7.5.0" 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /serializers/application-token.js: -------------------------------------------------------------------------------- 1 | const JSONAPISerializer = require('jsonapi-serializer').Serializer; 2 | 3 | /** 4 | * @typedef {{ name: string }} InputApplicationToken 5 | */ 6 | 7 | const applicationTokenSerializer = new JSONAPISerializer('application-tokens', { 8 | attributes: ['name'], 9 | }); 10 | 11 | module.exports = applicationTokenSerializer; 12 | -------------------------------------------------------------------------------- /serializers/environment.js: -------------------------------------------------------------------------------- 1 | const JSONAPISerializer = require('jsonapi-serializer').Serializer; 2 | 3 | function Environment(data) { 4 | return new JSONAPISerializer('environments', { 5 | attributes: ['name', 'apiEndpoint', 'project'], 6 | project: { 7 | ref: 'id', 8 | included: false, 9 | }, 10 | }).serialize(data); 11 | } 12 | 13 | module.exports = Environment; 14 | -------------------------------------------------------------------------------- /serializers/project.js: -------------------------------------------------------------------------------- 1 | const JSONAPISerializer = require('jsonapi-serializer').Serializer; 2 | 3 | function Project(data) { 4 | return new JSONAPISerializer('projects', { 5 | attributes: ['name'], 6 | keyForAttribute: 'underscore_case', 7 | }).serialize(data); 8 | } 9 | 10 | module.exports = Project; 11 | 
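For illustration only, not a file from the repository: a rough sketch of how these JSON:API serializer/deserializer pairs are typically used together, assuming jsonapi-serializer's standard behaviour; the exact payloads exchanged with the Forest Admin API may differ, the id and name below are made up, and the require paths assume a script placed at the repository root.

const serializeProject = require('./serializers/project');
const projectDeserializer = require('./deserializers/project');

// Serializing a plain object produces a JSON:API document, roughly:
// { data: { type: 'projects', id: '42', attributes: { name: 'My project' } } }
const document = serializeProject({ id: '42', name: 'My project' });

// Deserializing a JSON:API document flattens it back into a plain object.
projectDeserializer.deserialize(document)
  .then((project) => {
    console.log(project); // ~ { id: '42', name: 'My project' }
  });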
-------------------------------------------------------------------------------- /serializers/user.js: -------------------------------------------------------------------------------- 1 | const JSONAPISerializer = require('jsonapi-serializer').Serializer; 2 | 3 | function User(data) { 4 | return new JSONAPISerializer('users', { 5 | attributes: ['email', 'password', 'firstName', 'lastName'], 6 | keyForAttribute: 'underscore_case', 7 | }).serialize(data); 8 | } 9 | 10 | module.exports = User; 11 | -------------------------------------------------------------------------------- /services/analyzer/database-analyzer.js: -------------------------------------------------------------------------------- 1 | const analyzeMongoCollections = require('./mongo-collections-analyzer'); 2 | const analyzeSequelizeTables = require('./sequelize-tables-analyzer'); 3 | const EmptyDatabaseError = require('../../utils/errors/database/empty-database-error'); 4 | const { terminate } = require('../../utils/terminator'); 5 | 6 | async function reportEmptyDatabase(orm, dialect) { 7 | const logs = [`Your database looks empty! Please create some ${orm === 'mongoose' ? 'collections' : 'tables'} before running the command.`]; 8 | if (orm === 'sequelize') { 9 | logs.push('If not, check whether you are using a custom database schema (use in that case the --schema option).'); 10 | } 11 | return terminate(1, { 12 | logs, 13 | errorCode: 'database_empty', 14 | errorMessage: 'Your database is empty.', 15 | context: { 16 | orm, 17 | dialect, 18 | }, 19 | }); 20 | } 21 | 22 | function DatabaseAnalyzer(databaseConnection, config, allowWarning) { 23 | this.perform = async () => { 24 | let analyze; 25 | if (config.dbDialect === 'mongodb') { 26 | analyze = analyzeMongoCollections; 27 | } else { 28 | analyze = analyzeSequelizeTables; 29 | } 30 | return analyze(databaseConnection, config, allowWarning) 31 | .catch((error) => { 32 | if (error instanceof EmptyDatabaseError) { 33 | return reportEmptyDatabase(error.details.orm, error.details.dialect); 34 | } 35 | throw error; 36 | }); 37 | }; 38 | } 39 | 40 | module.exports = DatabaseAnalyzer; 41 | -------------------------------------------------------------------------------- /services/analyzer/mongo-hasmany-analyzer.js: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | const P = require('bluebird'); 3 | const { 4 | findCollectionMatchingSamples, 5 | filterReferenceCollection, 6 | } = require('../../utils/mongo-collections'); 7 | 8 | const OBJECT_ID_ARRAY = '[Mongoose.Schema.Types.ObjectId]'; 9 | const SAMPLE_COUNT_TO_FETCH = 10; 10 | const SAMPLE_COUNT_TO_FETCH_ARRAY = 5; 11 | 12 | const pickSampleValues = (databaseConnection, collectionName, field) => 13 | databaseConnection.collection(collectionName) 14 | .aggregate([ 15 | { $project: { [field.name]: { $slice: [`$${field.name}`, SAMPLE_COUNT_TO_FETCH_ARRAY] } } }, 16 | { $match: { [field.name]: { $ne: null } } }, 17 | { $sample: { size: SAMPLE_COUNT_TO_FETCH } }, 18 | { $unwind: `$${field.name}` }, 19 | { $project: { _id: false, value: `$${field.name}` } }, 20 | ]) 21 | .toArray() 22 | .then((samples) => _.map(samples, 'value')); 23 | 24 | const buildReference = (collectionName, referencedCollection, field) => { 25 | if (referencedCollection) { 26 | return { 27 | from: { collectionName, fieldName: field.name }, 28 | to: { collectionName: referencedCollection }, 29 | }; 30 | } 31 | return null; 32 | }; 33 | 34 | const detectReference = (databaseConnection, field, 
collectionName) => 35 | pickSampleValues(databaseConnection, collectionName, field) 36 | .then((samples) => findCollectionMatchingSamples(databaseConnection, samples)) 37 | .then((matches) => filterReferenceCollection(matches)) 38 | .then((referencedCollection) => buildReference(collectionName, referencedCollection, field)); 39 | 40 | const detectHasMany = (databaseConnection, fields, collectionName) => { 41 | const objectIdFields = fields.filter((field) => field.type === OBJECT_ID_ARRAY); 42 | return P.mapSeries( 43 | objectIdFields, 44 | (objectIdField) => detectReference(databaseConnection, objectIdField, collectionName), 45 | ).then((references) => references.filter((reference) => reference)); 46 | }; 47 | 48 | const applyHasMany = (fields, references) => 49 | references.forEach((reference) => { 50 | const field = _.find(fields, { name: reference.from.fieldName }); 51 | field.ref = reference.to.collectionName; 52 | field.hasMany = true; 53 | }); 54 | 55 | module.exports = { detectHasMany, applyHasMany }; 56 | -------------------------------------------------------------------------------- /services/analyzer/mongo-references-analyzer.js: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | const P = require('bluebird'); 3 | const { 4 | findCollectionMatchingSamples, 5 | filterReferenceCollection, 6 | } = require('../../utils/mongo-collections'); 7 | 8 | const OBJECT_ID = 'Mongoose.Schema.Types.ObjectId'; 9 | const SAMPLE_COUNT_TO_FETCH = 10; 10 | 11 | const pickSampleValues = (databaseConnection, collectionName, field) => 12 | databaseConnection.collection(collectionName) 13 | .aggregate([ 14 | { $match: { [field.name]: { $ne: null } } }, 15 | { $sample: { size: SAMPLE_COUNT_TO_FETCH } }, 16 | { $project: { _id: false, value: `$${field.name}` } }, 17 | ]) 18 | .toArray() 19 | .then((samples) => _.map(samples, 'value')); 20 | 21 | const buildReference = (collectionName, referencedCollection, field) => { 22 | if (referencedCollection) { 23 | return { 24 | from: { collectionName, fieldName: field.name }, 25 | to: { collectionName: referencedCollection }, 26 | }; 27 | } 28 | return null; 29 | }; 30 | 31 | const detectReference = (databaseConnection, field, collectionName) => 32 | pickSampleValues(databaseConnection, collectionName, field) 33 | .then((samples) => findCollectionMatchingSamples(databaseConnection, samples)) 34 | .then((matches) => filterReferenceCollection(matches)) 35 | .then((referencedCollection) => buildReference(collectionName, referencedCollection, field)); 36 | 37 | const detectReferences = (databaseConnection, fields, collectionName) => { 38 | const objectIdFields = fields.filter((field) => field.type === OBJECT_ID); 39 | return P.mapSeries( 40 | objectIdFields, 41 | (objectIdField) => detectReference(databaseConnection, objectIdField, collectionName), 42 | ).then((references) => references.filter((reference) => reference)); 43 | }; 44 | 45 | const applyReferences = (fields, references) => 46 | references.forEach((reference) => { 47 | const field = _.find(fields, { name: reference.from.fieldName }); 48 | field.ref = reference.to.collectionName; 49 | }); 50 | 51 | module.exports = { detectReferences, applyReferences }; 52 | -------------------------------------------------------------------------------- /services/analyzer/mysql-table-constraints-getter.js: -------------------------------------------------------------------------------- 1 | function MysqlTableConstraintsGetter(databaseConnection) { 2 | 
const queryInterface = databaseConnection.getQueryInterface(); 3 | 4 | // NOTICE: provide an array of arrays. Each inner array represents a (possibly composite) unique 5 | // index 6 | this.convertToUniqueIndexArray = (constraints) => { 7 | const uniqueIndexes = {}; 8 | constraints.filter((constraint) => constraint.columnType === 'UNIQUE') 9 | .forEach((constraint) => { 10 | uniqueIndexes[constraint.constraintName] = uniqueIndexes[constraint.constraintName] || []; 11 | uniqueIndexes[constraint.constraintName].push(constraint.columnName); 12 | }); 13 | const uniqueIndexArray = Object.values(uniqueIndexes); 14 | return uniqueIndexArray.length ? uniqueIndexArray : null; 15 | }; 16 | 17 | // NOTICE: This function exists only to create a structure compatible with the needed response. 18 | this.applyUniqueIndexArray = (constraints) => { 19 | if (constraints && constraints.length) { 20 | const uniqueIndexes = this.convertToUniqueIndexArray(constraints); 21 | // NOTICE: We apply the uniqueIndexes array to every element of the constraints array. 22 | return constraints.map((constraint) => ({ ...constraint, uniqueIndexes })); 23 | } 24 | return constraints; 25 | }; 26 | 27 | this.perform = async (table) => { 28 | const replacements = { table, schemaName: queryInterface.sequelize.config.database }; 29 | const query = ` 30 | SELECT DISTINCT 31 | tableConstraints.constraint_type AS columnType, 32 | tableConstraints.constraint_name AS constraintName, 33 | tableConstraints.table_name AS tableName, 34 | keyColumnUsage.column_name AS columnName, 35 | keyColumnUsage.referenced_table_name AS foreignTableName, 36 | keyColumnUsage.referenced_column_name AS foreignColumnName, 37 | uniqueIndexes.SEQ_IN_INDEX AS sequenceInIndex 38 | FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tableConstraints 39 | JOIN INFORMATION_SCHEMA.key_column_usage AS keyColumnUsage 40 | ON tableConstraints.table_name = keyColumnUsage.table_name 41 | AND tableConstraints.constraint_name = keyColumnUsage.constraint_name 42 | LEFT JOIN INFORMATION_SCHEMA.STATISTICS AS uniqueIndexes 43 | ON keyColumnUsage.column_name = uniqueIndexes.column_name 44 | AND tableConstraints.constraint_name = uniqueIndexes.INDEX_NAME 45 | WHERE tableConstraints.table_schema = :schemaName 46 | AND tableConstraints.table_name = :table 47 | ORDER BY uniqueIndexes.SEQ_IN_INDEX; 48 | `; 49 | 50 | const constraints = (await queryInterface.sequelize 51 | .query(query, { type: queryInterface.sequelize.QueryTypes.SELECT, replacements })) 52 | // NOTICE: This map removes the `sequenceInIndex` property from the constraints. 
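/* Illustration, not in the original source. Assuming the query above returns rows shaped like
   { columnType: 'UNIQUE', constraintName: 'name_unique', tableName: 'users', columnName: 'first_name',
     foreignTableName: null, foreignColumnName: null, sequenceInIndex: 1 },
   then for a table with a primary key, a single-column unique index on 'email' and a composite
   unique index on ('first_name', 'last_name'), convertToUniqueIndexArray would return
   [['email'], ['first_name', 'last_name']], and applyUniqueIndexArray would copy that array onto
   every constraint row as its 'uniqueIndexes' property. Column and constraint names are made up. */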
53 | .map(({ sequenceInIndex, ...constraint }) => constraint); 54 | 55 | return this.applyUniqueIndexArray(constraints); 56 | }; 57 | } 58 | 59 | module.exports = MysqlTableConstraintsGetter; 60 | -------------------------------------------------------------------------------- /services/application-token.js: -------------------------------------------------------------------------------- 1 | const UnableToCreateApplicationTokenError = require('../utils/errors/application-token/unable-to-create-application-token-error'); 2 | 3 | class ApplicationTokenService { 4 | /** 5 | * @param {import("../context/init").Context} context 6 | */ 7 | constructor({ api, os }) { 8 | /** @private @readonly */ 9 | this.api = api; 10 | /** @private @readonly */ 11 | this.os = os; 12 | 13 | ['api', 'os'].forEach((name) => { 14 | if (!this[name]) throw new Error(`Missing dependency ${name}`); 15 | }); 16 | } 17 | 18 | /** 19 | * @param {string} sessionToken 20 | * @returns {Promise} 21 | */ 22 | async generateApplicationToken(sessionToken) { 23 | const hostname = this.os.hostname(); 24 | const inputToken = { 25 | name: `Lumber @${hostname}`, 26 | }; 27 | 28 | try { 29 | const applicationToken = await this.api.createApplicationToken(inputToken, sessionToken); 30 | 31 | return applicationToken.token; 32 | } catch (e) { 33 | throw new UnableToCreateApplicationTokenError({ reason: e.message }); 34 | } 35 | } 36 | 37 | /** 38 | * @param {string} token 39 | * @returns {Promise} 40 | */ 41 | async deleteApplicationToken(token) { 42 | try { 43 | await this.api.deleteApplicationToken(token); 44 | } catch (error) { 45 | if (error.status === 404) { 46 | return undefined; 47 | } 48 | 49 | throw error; 50 | } 51 | 52 | return undefined; 53 | } 54 | } 55 | 56 | module.exports = ApplicationTokenService; 57 | -------------------------------------------------------------------------------- /services/command-generate-config-getter.js: -------------------------------------------------------------------------------- 1 | const Prompter = require('./prompter/general-prompter'); 2 | 3 | const OPTIONS_DATABASE_MANDATORY = [ 4 | 'dbDialect', 5 | 'dbName', 6 | 'dbHostname', 7 | 'dbPort', 8 | 'dbUser', 9 | 'dbPassword', 10 | ]; 11 | const OPTIONS_DATABASE_OPTIONAL = [ 12 | 'dbSchema', 13 | 'ssl', 14 | 'mongodbSrv', 15 | ]; 16 | const OPTIONS_APPLICATION = [ 17 | 'appName', 18 | 'appHostname', 19 | 'appPort', 20 | 'email', 21 | ]; 22 | 23 | function CommandGenerateConfigGetter(program) { 24 | this.options = { 25 | forConnectionUrl: [ 26 | 'dbConnectionUrl', 27 | ...OPTIONS_DATABASE_OPTIONAL, 28 | ...OPTIONS_APPLICATION, 29 | ], 30 | forFullPrompt: [ 31 | ...OPTIONS_DATABASE_MANDATORY, 32 | ...OPTIONS_DATABASE_OPTIONAL, 33 | ...OPTIONS_APPLICATION, 34 | ], 35 | }; 36 | 37 | this.getOptions = () => { 38 | if (program.connectionUrl) { 39 | return this.options.forConnectionUrl; 40 | } 41 | 42 | return this.options.forFullPrompt; 43 | }; 44 | 45 | this.perform = async () => new Prompter(this.getOptions(), program).getConfig(); 46 | } 47 | 48 | module.exports = CommandGenerateConfigGetter; 49 | -------------------------------------------------------------------------------- /services/directory-existence-checker.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | 3 | function DirectoryExistenceChecker(path, directory) { 4 | this.perform = () => { 5 | const directoryToCheck = `${path}/${directory}`; 6 | try { 7 | fs.accessSync(directoryToCheck, fs.F_OK); 8 | return true; 9 | } catch 
(error) { 10 | return false; 11 | } 12 | }; 13 | } 14 | 15 | module.exports = DirectoryExistenceChecker; 16 | -------------------------------------------------------------------------------- /services/error-handler.js: -------------------------------------------------------------------------------- 1 | const LumberError = require('../utils/lumber-error'); 2 | 3 | class ErrorHandler { 4 | /** 5 | * @param {import('../context/init').Context} context 6 | */ 7 | constructor(context) { 8 | /** @private @readonly */ 9 | this.terminator = context.terminator; 10 | /** @private @readonly */ 11 | this.chalk = context.chalk; 12 | /** @private @readonly */ 13 | this.messages = context.messages; 14 | 15 | ['terminator', 'chalk', 'messages'].forEach((name) => { 16 | if (!this[name]) throw new Error(`Missing dependency ${name}`); 17 | }); 18 | } 19 | 20 | /** 21 | * @private 22 | * @param {LumberError} error 23 | * @returns {string[]} 24 | */ 25 | getMessages(error) { 26 | const messages = []; 27 | if (error.reason) { 28 | messages.push(`${this.chalk.red(error.message)}: ${error.reason}`); 29 | } else { 30 | messages.push(this.chalk.red(error.message)); 31 | } 32 | 33 | if (error.possibleSolution) { 34 | messages.push(error.possibleSolution); 35 | } 36 | 37 | return messages; 38 | } 39 | 40 | /** 41 | * @param {Error} error 42 | */ 43 | async handle(error) { 44 | if (error instanceof LumberError) { 45 | await this.terminator.terminate(1, { 46 | logs: this.getMessages(error), 47 | }); 48 | } else { 49 | const message = `${this.messages.ERROR_UNEXPECTED} ${this.chalk.red(error.message)}`; 50 | await this.terminator.terminate(1, { 51 | logs: [message], 52 | }); 53 | } 54 | } 55 | } 56 | 57 | module.exports = ErrorHandler; 58 | -------------------------------------------------------------------------------- /services/event-sender.js: -------------------------------------------------------------------------------- 1 | const superagent = require('superagent'); 2 | 3 | const FOREST_URL = process.env.FOREST_URL || 'https://forestadmin-server.herokuapp.com'; 4 | 5 | class EventSender { 6 | constructor() { 7 | this.appName = null; 8 | this.command = null; 9 | } 10 | 11 | async notifyError(code = 'unknown_error', message = null, context = undefined) { 12 | if (!this.appName || !this.command) { return; } 13 | 14 | try { 15 | await superagent.post(`${FOREST_URL}/api/lumber/error`, { 16 | data: { 17 | type: 'events', 18 | attributes: { 19 | code, 20 | message, 21 | project_name: this.appName, 22 | command: this.command, 23 | context, 24 | }, 25 | }, 26 | }); 27 | } catch (e) { 28 | // NOTICE: We want a silent error because this is just for error reporting 29 | // and should not be blocking if that does not work. 30 | } 31 | } 32 | 33 | async notifySuccess() { 34 | if (!this.appName || !this.command) { return; } 35 | 36 | try { 37 | await superagent.post(`${FOREST_URL}/api/lumber/success`, { 38 | data: { 39 | type: 'events', 40 | attributes: { 41 | command: this.command, 42 | project_name: this.appName, 43 | }, 44 | }, 45 | }); 46 | } catch (e) { 47 | // NOTICE: We want a silent error because this is just for error reporting 48 | // and should not be blocking if that does not work. 
49 | } 50 | } 51 | } 52 | 53 | module.exports = new EventSender(); 54 | -------------------------------------------------------------------------------- /services/key-generator.js: -------------------------------------------------------------------------------- 1 | const P = require('bluebird'); 2 | const crypto = require('crypto'); 3 | 4 | const randomBytes = P.promisify(crypto.randomBytes); 5 | 6 | function KeyGenerator() { 7 | this.generate = () => randomBytes(48).then((buffer) => buffer.toString('hex')); 8 | } 9 | 10 | module.exports = KeyGenerator; 11 | -------------------------------------------------------------------------------- /services/logger.js: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk'); 2 | 3 | class Logger { 4 | constructor(silent) { 5 | this.silent = silent !== undefined 6 | ? silent 7 | : process.env.NODE_ENV === 'test'; 8 | this.spinner = null; 9 | } 10 | 11 | pauseSpinner() { 12 | if (this.spinner) { 13 | this.spinner.pause(); 14 | } 15 | } 16 | 17 | continueSpinner() { 18 | if (this.spinner) { 19 | this.spinner.continue(); 20 | } 21 | } 22 | 23 | log(message) { 24 | if (!this.silent) { 25 | // eslint-disable-next-line no-console 26 | console.log(message); 27 | } 28 | } 29 | 30 | logLine(color, message) { 31 | this.log(`${chalk[color]('>')} ${message}`); 32 | } 33 | 34 | logLines(color, messages) { 35 | messages.forEach((message) => this.logLine(color, message)); 36 | } 37 | 38 | success(...messages) { this.logLines('green', messages); } 39 | 40 | info(...messages) { this.logLines('blue', messages); } 41 | 42 | warn(...messages) { this.logLines('yellow', messages); } 43 | 44 | error(...messages) { this.logLines('red', messages); } 45 | } 46 | 47 | module.exports = new Logger(); 48 | -------------------------------------------------------------------------------- /services/oidc/error.js: -------------------------------------------------------------------------------- 1 | const openIdClient = require('openid-client'); 2 | const LumberError = require('../../utils/lumber-error'); 3 | 4 | class OidcError extends LumberError { 5 | /** 6 | * @param {string} message 7 | * @param {Error|undefined} origin 8 | * @param {string} [possibleSolution] 9 | */ 10 | constructor(message, origin, possibleSolution) { 11 | let reason; 12 | 13 | if (origin instanceof openIdClient.errors.OPError) { 14 | /** @public @readonly @type {string} */ 15 | reason = origin.error || origin.message; 16 | } else if (origin) { 17 | reason = origin.message; 18 | } 19 | 20 | super(message, undefined, { reason, possibleSolution }); 21 | 22 | this.name = 'OidcError'; 23 | } 24 | } 25 | 26 | module.exports = OidcError; 27 | -------------------------------------------------------------------------------- /services/project-creator.js: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk'); 2 | const context = require('../context'); 3 | 4 | const { api } = context.inject(); 5 | 6 | const KeyGenerator = require('./key-generator'); 7 | const { terminate } = require('../utils/terminator'); 8 | const { ERROR_UNEXPECTED } = require('../utils/messages'); 9 | 10 | if (!api) throw new Error('Missing dependency api'); 11 | 12 | function ProjectCreator(sessionToken) { 13 | this.createProject = async (projectName, config) => { 14 | try { 15 | const newProject = await api.createProject(config, sessionToken, { name: projectName }); 16 | 17 | return { 18 | envSecret: 
newProject.defaultEnvironment.secretKey, 19 | authSecret: await new KeyGenerator().generate(), 20 | }; 21 | } catch (error) { 22 | let message; 23 | if (error.message === 'Unauthorized') { 24 | message = `Your session has expired. Please log back in with the command ${chalk.cyan('lumber login')}.`; 25 | } else if (error.message === 'Conflict') { 26 | message = 'A project with this name already exists. Please choose another name.'; 27 | } else { 28 | message = `${ERROR_UNEXPECTED} ${chalk.red(error)}`; 29 | } 30 | 31 | return terminate(1, { 32 | logs: [message], 33 | }); 34 | } 35 | }; 36 | } 37 | 38 | module.exports = ProjectCreator; 39 | -------------------------------------------------------------------------------- /services/prompter/abstract-prompter.js: -------------------------------------------------------------------------------- 1 | class AbstractPrompter { 2 | constructor(requests) { 3 | this.requests = requests; 4 | } 5 | 6 | isOptionRequested(option) { 7 | if (!option) { return false; } 8 | 9 | return this.requests.includes(option); 10 | } 11 | } 12 | 13 | module.exports = AbstractPrompter; 14 | -------------------------------------------------------------------------------- /services/prompter/application-prompts.js: -------------------------------------------------------------------------------- 1 | const AbstractPrompter = require('./abstract-prompter'); 2 | 3 | class ApplicationPrompts extends AbstractPrompter { 4 | constructor(requests, envConfig, prompts, program) { 5 | super(requests); 6 | this.envConfig = envConfig; 7 | this.prompts = prompts; 8 | this.program = program; 9 | } 10 | 11 | async handlePrompts() { 12 | this.handleHostname(); 13 | this.handlePort(); 14 | } 15 | 16 | handleHostname() { 17 | if (this.isOptionRequested('appHostname')) { 18 | this.envConfig.appHostname = this.program.applicationHost; 19 | if (!this.envConfig.appHostname) { 20 | this.prompts.push({ 21 | type: 'input', 22 | name: 'appHostname', 23 | message: 'What\'s the IP/hostname on which your application will be running? ', 24 | default: 'http://localhost', 25 | validate: (hostname) => { 26 | if (!/^https?:\/\/.*/i.test(hostname)) { 27 | return 'Application hostname must be a valid url.'; 28 | } 29 | if (!/^http((s:\/\/.*)|(s?:\/\/(localhost|127\.0\.0\.1).*))/i.test(hostname)) { 30 | return 'HTTPS protocol is mandatory, except for localhost and 127.0.0.1.'; 31 | } 32 | return true; 33 | }, 34 | }); 35 | } 36 | } 37 | } 38 | 39 | handlePort() { 40 | if (this.isOptionRequested('appPort')) { 41 | this.envConfig.appPort = this.program.applicationPort; 42 | if (!this.envConfig.appPort) { 43 | this.prompts.push({ 44 | type: 'input', 45 | name: 'appPort', 46 | message: 'What\'s the port on which your application will be running? 
', 47 | default: '3310', 48 | validate: (port) => { 49 | if (!/^\d+$/.test(port)) { 50 | return 'The port must be a number.'; 51 | } 52 | 53 | const parsedPort = parseInt(port, 10); 54 | if (parsedPort > 0 && parsedPort < 65536) { return true; } 55 | return 'This is not a valid port.'; 56 | }, 57 | }); 58 | } 59 | } 60 | } 61 | } 62 | 63 | module.exports = ApplicationPrompts; 64 | -------------------------------------------------------------------------------- /services/prompter/general-prompter.js: -------------------------------------------------------------------------------- 1 | const inquirer = require('inquirer'); 2 | const _ = require('lodash'); 3 | const ApplicationPrompt = require('./application-prompts'); 4 | const DatabasePrompt = require('./database-prompts'); 5 | const ProjectPrompt = require('./project-prompts'); 6 | const PromptError = require('./prompter-error'); 7 | const UserPrompt = require('./user-prompts'); 8 | const Terminator = require('../../utils/terminator'); 9 | 10 | class GeneralPrompter { 11 | constructor(requests, program) { 12 | this.prompts = []; 13 | this.program = program; 14 | this.envConfig = {}; 15 | 16 | this.projectPrompt = new ProjectPrompt(requests, this.envConfig, program); 17 | this.databasePrompt = new DatabasePrompt(requests, this.envConfig, this.prompts, program); 18 | this.applicationPrompt = new ApplicationPrompt(requests, this.envConfig, this.prompts, program); 19 | this.userPrompt = new UserPrompt(requests, this.envConfig, this.prompts, program); 20 | 21 | this.initSourceDirectory(); 22 | } 23 | 24 | initSourceDirectory() { 25 | if (this.program.sourceDirectory) { 26 | this.envConfig.sourceDirectory = this.program.sourceDirectory; 27 | } else { 28 | this.envConfig.sourceDirectory = process.cwd(); 29 | } 30 | } 31 | 32 | async getConfig() { 33 | try { 34 | await this.projectPrompt.handlePrompts(); 35 | await this.databasePrompt.handlePrompts(); 36 | await this.applicationPrompt.handlePrompts(); 37 | await this.userPrompt.handlePrompts(); 38 | } catch (error) { 39 | if (error instanceof PromptError) { 40 | await Terminator.terminate(1, { 41 | errorCode: error.errorCode, 42 | errorMessage: error.errorMessage, 43 | logs: error.logs, 44 | }); 45 | } else { 46 | throw error; 47 | } 48 | } 49 | 50 | this.config = await inquirer.prompt(this.prompts); 51 | 52 | this.cleanConfigOptions(); 53 | 54 | return _.merge(this.config, this.envConfig); 55 | } 56 | 57 | cleanConfigOptions() { 58 | if (!this.config) { return; } 59 | 60 | // NOTICE: Remove the dbPassword if there's no password for the DB 61 | // connection. 
62 | if (!this.config.dbPassword) { delete this.config.dbPassword; } 63 | } 64 | } 65 | 66 | module.exports = GeneralPrompter; 67 | -------------------------------------------------------------------------------- /services/prompter/project-prompts.js: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk'); 2 | const DirectoryExistenceChecker = require('../directory-existence-checker'); 3 | const AbstractPrompter = require('./abstract-prompter'); 4 | const PrompterError = require('./prompter-error'); 5 | const messages = require('../../utils/messages'); 6 | 7 | class ProjectPrompts extends AbstractPrompter { 8 | constructor(requests, envConfig, program) { 9 | super(requests); 10 | this.envConfig = envConfig; 11 | this.program = program; 12 | } 13 | 14 | async handlePrompts() { 15 | await this.handleName(); 16 | } 17 | 18 | async handleName() { 19 | if (this.isOptionRequested('appName')) { 20 | const [projectName] = this.program.args; 21 | 22 | if (!projectName) { 23 | throw new PrompterError( 24 | messages.ERROR_MISSING_PROJECT_NAME, 25 | [ 26 | messages.ERROR_MISSING_PROJECT_NAME, 27 | messages.HINT_MISSING_PROJECT_NAME, 28 | ], 29 | ); 30 | } else if (new DirectoryExistenceChecker(process.cwd(), projectName).perform()) { 31 | const message = `The directory ${chalk.red(`${process.cwd()}/${projectName}`)} already exists.`; 32 | throw new PrompterError( 33 | message, 34 | [ 35 | message, 36 | messages.HINT_DIRECTORY_ALREADY_EXISTS, 37 | ], 38 | ); 39 | } else { 40 | this.envConfig.appName = projectName; 41 | } 42 | } 43 | } 44 | } 45 | 46 | module.exports = ProjectPrompts; 47 | -------------------------------------------------------------------------------- /services/prompter/prompter-error.js: -------------------------------------------------------------------------------- 1 | class PrompterError extends Error { 2 | constructor(errorMessage, logs) { 3 | super(errorMessage); 4 | this.errorCode = 'unexpected_error'; 5 | this.errorMessage = errorMessage; 6 | this.logs = logs; 7 | } 8 | } 9 | 10 | module.exports = PrompterError; 11 | -------------------------------------------------------------------------------- /services/prompter/user-prompts.js: -------------------------------------------------------------------------------- 1 | const AbstractPrompter = require('./abstract-prompter'); 2 | 3 | class UserPrompts extends AbstractPrompter { 4 | constructor(requests, envConfig, prompts, program) { 5 | super(requests); 6 | this.envConfig = envConfig; 7 | this.prompts = prompts; 8 | this.program = program; 9 | } 10 | 11 | async handlePrompts() { 12 | this.handleEmail(); 13 | this.handlePassword(); 14 | this.handleToken(); 15 | } 16 | 17 | handleEmail() { 18 | if (this.isOptionRequested('email')) { 19 | this.envConfig.email = this.program.email; 20 | 21 | if (!this.envConfig.email) { 22 | this.prompts.push({ 23 | type: 'input', 24 | name: 'email', 25 | message: 'What\'s your email address? 
', 26 | validate: (email) => { 27 | if (email) { return true; } 28 | return 'Please enter your email address.'; 29 | }, 30 | }); 31 | } 32 | } 33 | } 34 | 35 | handlePassword() { 36 | this.envConfig.password = this.program.password; 37 | } 38 | 39 | handleToken() { 40 | this.envConfig.token = this.program.token; 41 | } 42 | } 43 | 44 | module.exports = UserPrompts; 45 | -------------------------------------------------------------------------------- /services/spinners.js: -------------------------------------------------------------------------------- 1 | const Spinnies = require('spinnies'); 2 | const logger = require('./logger'); 3 | 4 | const spinnies = new Spinnies({ 5 | spinnerColor: 'blue', 6 | spinner: { 7 | interval: 80, 8 | frames: ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'], 9 | }, 10 | }); 11 | 12 | module.exports = { 13 | add(key, options, promise = null) { 14 | spinnies.add(key, options); 15 | 16 | const spinner = { 17 | succeed(succeedOptions) { 18 | spinnies.succeed(key, succeedOptions); 19 | }, 20 | fail(failOptions) { 21 | spinnies.fail(key, failOptions); 22 | }, 23 | pause() { 24 | spinnies.remove(key); 25 | spinnies.stopAll(); 26 | }, 27 | continue() { 28 | spinnies.add(key, options); 29 | }, 30 | }; 31 | logger.spinner = spinner; 32 | 33 | if (promise) { 34 | promise 35 | .then((result) => { 36 | logger.spinner = null; 37 | spinner.succeed(); 38 | return result; 39 | }) 40 | .catch((error) => { 41 | logger.spinner = null; 42 | spinner.fail(); 43 | throw error; 44 | }); 45 | } 46 | 47 | return spinner; 48 | }, 49 | }; 50 | -------------------------------------------------------------------------------- /templates/app/Dockerfile.hbs: -------------------------------------------------------------------------------- 1 | FROM node:10-alpine 2 | WORKDIR /usr/src/app 3 | COPY package*.json ./ 4 | RUN npm install lumber-cli -g -s 5 | RUN npm install -s 6 | COPY . . 
7 | EXPOSE ${APPLICATION_PORT} 8 | CMD ["npm", "start"] 9 | -------------------------------------------------------------------------------- /templates/app/app.hbs: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | const requireAll = require('require-all'); 3 | const path = require('path'); 4 | const cookieParser = require('cookie-parser'); 5 | const bodyParser = require('body-parser'); 6 | const cors = require('cors'); 7 | const jwt = require('express-jwt'); 8 | const morgan = require('morgan'); 9 | const { 10 | errorHandler, 11 | ensureAuthenticated, 12 | PUBLIC_ROUTES, 13 | } = require('forest-express-{{#if isMongoDB }}mongoose{{else}}sequelize{{/if}}'); 14 | 15 | const app = express(); 16 | 17 | let allowedOrigins = [/\.forestadmin\.com$/{{#if forestUrl }}, /localhost:\d{4}$/{{/if}}]; 18 | 19 | if (process.env.CORS_ORIGINS) { 20 | allowedOrigins = allowedOrigins.concat(process.env.CORS_ORIGINS.split(',')); 21 | } 22 | 23 | const corsConfig = { 24 | origin: allowedOrigins, 25 | maxAge: 86400, // NOTICE: 1 day 26 | credentials: true, 27 | }; 28 | 29 | app.use(morgan('tiny')); 30 | app.use('/forest/authentication', cors({ 31 | ...corsConfig, 32 | // The null origin is sent by browsers for redirected AJAX calls 33 | // we need to support this in authentication routes because OIDC 34 | // redirects to the callback route 35 | origin: corsConfig.origin.concat('null') 36 | })); 37 | app.use(cors(corsConfig)); 38 | app.use(bodyParser.json()); 39 | app.use(bodyParser.urlencoded({ extended: false })); 40 | app.use(cookieParser()); 41 | app.use(express.static(path.join(__dirname, 'public'))); 42 | 43 | app.use(jwt({ 44 | secret: process.env.FOREST_AUTH_SECRET, 45 | credentialsRequired: false, 46 | algorithms: ['HS256'], 47 | })); 48 | 49 | app.use('/forest', (request, response, next) => { 50 | if (PUBLIC_ROUTES.includes(request.url)) { 51 | return next(); 52 | } 53 | return ensureAuthenticated(request, response, next); 54 | }); 55 | 56 | requireAll({ 57 | dirname: path.join(__dirname, 'routes'), 58 | recursive: true, 59 | resolve: (Module) => app.use('/forest', Module), 60 | }); 61 | 62 | requireAll({ 63 | dirname: path.join(__dirname, 'middlewares'), 64 | recursive: true, 65 | resolve: (Module) => Module(app), 66 | }); 67 | 68 | app.use(errorHandler()); 69 | 70 | module.exports = app; 71 | -------------------------------------------------------------------------------- /templates/app/config/databases.hbs: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | const databaseOptions = { 4 | {{#if isMongoDB}} 5 | useNewUrlParser: true, 6 | useUnifiedTopology: true, 7 | {{else}} 8 | logging: !process.env.NODE_ENV || process.env.NODE_ENV === 'development' ? console.log : false, 9 | pool: { maxConnections: 10, minConnections: 1 }, 10 | dialectOptions: {}, 11 | {{/if}} 12 | }; 13 | {{#if isMySQL}} 14 | 15 | databaseOptions.dialectOptions.typeCast = (field, useDefaultTypeCasting) => { 16 | if ((field.type === "BIT") && (field.length === 1)) { 17 | const bytes = field.buffer(); 18 | return bytes ? 
bytes[0] === 1 : bytes; 19 | } 20 | 21 | return useDefaultTypeCasting(); 22 | }; 23 | {{/if}} 24 | {{#unless isMongoDB}} 25 | 26 | if (process.env.DATABASE_SSL && JSON.parse(process.env.DATABASE_SSL.toLowerCase())) { 27 | {{#if isMySQL}} 28 | databaseOptions.dialectOptions.ssl = { rejectUnauthorized: true }; 29 | {{else if isMSSQL}} 30 | databaseOptions.dialectOptions.options = { encrypt: true }; 31 | {{else}} 32 | const rejectUnauthorized = process.env.DATABASE_REJECT_UNAUTHORIZED; 33 | if (rejectUnauthorized && (JSON.parse(rejectUnauthorized.toLowerCase()) === false)) { 34 | databaseOptions.dialectOptions.ssl = { rejectUnauthorized: false }; 35 | } else { 36 | databaseOptions.dialectOptions.ssl = true; 37 | } 38 | {{/if}} 39 | } 40 | {{/unless}} 41 | 42 | module.exports = [{ 43 | name: 'default', 44 | modelsDir: path.resolve(__dirname, '../models'), 45 | connection: { 46 | url: process.env.DATABASE_URL, 47 | options: { ...databaseOptions }, 48 | }, 49 | }]; 50 | -------------------------------------------------------------------------------- /templates/app/docker-compose.hbs: -------------------------------------------------------------------------------- 1 | version: '3.4' 2 | services: 3 | app: 4 | build: 5 | context: . 6 | dockerfile: Dockerfile 7 | {{#if network}} 8 | network: {{network}} 9 | {{/if}} 10 | container_name: {{ containerName }} 11 | environment: 12 | - APPLICATION_PORT=${APPLICATION_PORT} 13 | - APPLICATION_URL=${APPLICATION_URL} 14 | - DATABASE_URL={{ databaseUrl }} 15 | {{#if dbSchema }} 16 | - DATABASE_SCHEMA=${DATABASE_SCHEMA} 17 | {{/if}} 18 | - DATABASE_SSL=${DATABASE_SSL} 19 | - FOREST_AUTH_SECRET=${FOREST_AUTH_SECRET} 20 | - FOREST_ENV_SECRET=${FOREST_ENV_SECRET} 21 | {{#if forestUrl }} 22 | - NODE_TLS_REJECT_UNAUTHORIZED=0 23 | - FOREST_URL={{ forestUrl }} 24 | {{/if}} 25 | ports: 26 | - "${APPLICATION_PORT}:${APPLICATION_PORT}" 27 | volumes: 28 | - ./:/usr/src/app 29 | -------------------------------------------------------------------------------- /templates/app/dockerignore.hbs: -------------------------------------------------------------------------------- 1 | node_modules 2 | npm-debug.log 3 | .env 4 | -------------------------------------------------------------------------------- /templates/app/env.hbs: -------------------------------------------------------------------------------- 1 | APPLICATION_PORT={{ port }} 2 | {{#if applicationUrl}} 3 | APPLICATION_URL={{ applicationUrl }} 4 | {{/if}} 5 | 6 | CORS_ORIGINS= 7 | 8 | DATABASE_URL={{ databaseUrl }} 9 | {{#if hasDockerDatabaseUrl}} 10 | DOCKER_DATABASE_URL={{ dockerDatabaseUrl }} 11 | {{/if}} 12 | {{#if dbSchema }} 13 | DATABASE_SCHEMA={{ dbSchema }} 14 | {{/if}} 15 | DATABASE_SSL={{ ssl }} 16 | # This should be removed in production environment. 
17 | DATABASE_REJECT_UNAUTHORIZED=false 18 | 19 | FOREST_AUTH_SECRET={{ forestAuthSecret }} 20 | FOREST_ENV_SECRET={{ forestEnvSecret }} 21 | -------------------------------------------------------------------------------- /templates/app/forest/collection.hbs: -------------------------------------------------------------------------------- 1 | const { collection } = require('forest-express-{{#if isMongoDB }}mongoose{{else}}sequelize{{/if}}'); 2 | 3 | // This file allows you to add to your Forest UI: 4 | // - Smart actions: https://docs.forestadmin.com/documentation/reference-guide/actions/create-and-manage-smart-actions 5 | // - Smart fields: https://docs.forestadmin.com/documentation/reference-guide/fields/create-and-manage-smart-fields 6 | // - Smart relationships: https://docs.forestadmin.com/documentation/reference-guide/relationships/create-a-smart-relationship 7 | // - Smart segments: https://docs.forestadmin.com/documentation/reference-guide/segments/smart-segments 8 | collection('{{ table }}', { 9 | actions: [], 10 | fields: [], 11 | segments: [], 12 | }); 13 | -------------------------------------------------------------------------------- /templates/app/gitignore.hbs: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | .env 4 | -------------------------------------------------------------------------------- /templates/app/middlewares/forestadmin.hbs: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk'); 2 | const path = require('path'); 3 | {{#if isMongoDB}} 4 | const Liana = require('forest-express-mongoose'); 5 | {{else}} 6 | const Liana = require('forest-express-sequelize'); 7 | {{/if}} 8 | const { objectMapping, connections } = require('../models'); 9 | 10 | module.exports = async function forestadmin(app) { 11 | app.use(await Liana.init({ 12 | configDir: path.join(__dirname, '../forest'), 13 | envSecret: process.env.FOREST_ENV_SECRET, 14 | authSecret: process.env.FOREST_AUTH_SECRET, 15 | objectMapping, 16 | connections, 17 | })); 18 | 19 | console.log(chalk.cyan('Your admin panel is available here: https://app.forestadmin.com/projects')); 20 | }; 21 | -------------------------------------------------------------------------------- /templates/app/middlewares/welcome.hbs: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | module.exports = function welcome(app) { 4 | app.get('/', (req, res) => { 5 | res.sendFile(path.join(__dirname, '../views/index.html')); 6 | }); 7 | }; 8 | -------------------------------------------------------------------------------- /templates/app/models/index.hbs: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | {{#if isMongoDB}} 4 | const Mongoose = require('mongoose'); 5 | {{else}} 6 | const Sequelize = require('sequelize'); 7 | {{/if}} 8 | 9 | const databasesConfiguration = require('../config/databases'); 10 | 11 | const connections = {}; 12 | const db = {}; 13 | 14 | databasesConfiguration.forEach((databaseInfo) => { 15 | {{#if isMongoDB}} 16 | const connection = Mongoose.createConnection(databaseInfo.connection.url, databaseInfo.connection.options); 17 | {{else}} 18 | const connection = new Sequelize(databaseInfo.connection.url, databaseInfo.connection.options); 19 | {{/if}} 20 | connections[databaseInfo.name] = connection; 21 | 22 | const modelsDir = 
databaseInfo.modelsDir || path.join(__dirname, databaseInfo.name); 23 | fs 24 | .readdirSync(modelsDir) 25 | .filter((file) => file.indexOf('.') !== 0 && file !== 'index.js') 26 | .forEach((file) => { 27 | try { 28 | {{#if isMongoDB}} 29 | const model = require(path.join(modelsDir, file))(connection, Mongoose); 30 | db[model.modelName] = model; 31 | {{else}} 32 | const model = connection.import(path.join(modelsDir, file)); 33 | db[model.name] = model; 34 | {{/if}} 35 | } catch (error) { 36 | console.error(`Model creation error: ${error}`); 37 | } 38 | }); 39 | }); 40 | {{#unless isMongoDB}} 41 | 42 | Object.keys(db).forEach((modelName) => { 43 | if ('associate' in db[modelName]) { 44 | db[modelName].associate(db); 45 | } 46 | }); 47 | {{/unless}} 48 | 49 | {{#if isMongoDB}} 50 | db.objectMapping = Mongoose; 51 | {{else}} 52 | db.objectMapping = Sequelize; 53 | {{/if}} 54 | db.connections = connections; 55 | 56 | module.exports = db; 57 | -------------------------------------------------------------------------------- /templates/app/models/mongo-model.hbs: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | {{#each fields as |field|}} 9 | {{wq field.name}}: {{#if field.ref}}{ type: {{field.type}}, ref: '{{field.ref}}' }{{else if (isObject field.type)}}{{>renderNested type=field.type level=2}}{{else}}{{field.type}}{{/if}}, 10 | {{/each}} 11 | }, { 12 | timestamps: {{timestamps}}, 13 | }); 14 | 15 | return mongoose.model('{{modelName}}', schema, '{{table}}'); 16 | }; 17 | -------------------------------------------------------------------------------- /templates/app/models/sequelize-model.hbs: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 
6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const {{modelVariableName}} = sequelize.define('{{modelName}}', { 8 | {{#each fields as |field|}} 9 | {{#if field.hasParenthesis}}'{{/if}}{{field.name}}{{#if field.hasParenthesis}}'{{/if}}: { 10 | type: DataTypes.{{{field.type}}},{{#if field.nameColumnUnconventional}} 11 | field: '{{field.nameColumn}}',{{/if}}{{#if field.primaryKey}} 12 | primaryKey: true,{{/if}}{{#if field.hasSafeDefaultValue}} 13 | defaultValue: {{{field.safeDefaultValue}}},{{/if}}{{#if field.isRequired}} 14 | allowNull: false,{{/if}} 15 | }, 16 | {{/each}} 17 | }, { 18 | tableName: '{{table}}',{{#if underscored}} 19 | underscored: true,{{/if}}{{#unless timestamps}} 20 | timestamps: false,{{/unless}}{{#if schema}} 21 | schema: process.env.DATABASE_SCHEMA,{{/if}} 22 | }); 23 | {{#if noId}} {{modelVariableName}}.removeAttribute('id');{{/if}} 24 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 25 | {{modelVariableName}}.associate = (models) => { 26 | {{#each references as |reference|}} 27 | {{../modelVariableName}}.{{reference.association}}(models.{{reference.ref}}, { 28 | {{#if reference.isBelongsToMany}} 29 | through: '{{reference.through}}', 30 | foreignKey: '{{reference.foreignKey}}', 31 | otherKey: '{{reference.otherKey}}', 32 | {{else}} 33 | foreignKey: { 34 | name: '{{reference.foreignKeyName}}', 35 | field: '{{reference.foreignKey}}', 36 | },{{#if reference.targetKey}} 37 | targetKey: '{{reference.targetKey}}',{{/if}}{{#if reference.sourceKey}} 38 | sourceKey: '{{reference.sourceKey}}',{{/if}} 39 | {{/if}} 40 | {{#if reference.as}} 41 | as: '{{reference.as}}', 42 | {{/if}} 43 | }); 44 | {{/each}} 45 | }; 46 | 47 | return {{modelVariableName}}; 48 | }; 49 | -------------------------------------------------------------------------------- /templates/app/public/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ForestAdmin/lumber/d951e93ff075fc81c79142b296a953a9f06a330e/templates/app/public/favicon.png -------------------------------------------------------------------------------- /templates/app/server.hbs: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const debug = require('debug')('{name}:server'); 3 | const http = require('http'); 4 | const chalk = require('chalk'); 5 | const app = require('./app'); 6 | 7 | function normalizePort(val) { 8 | const port = parseInt(val, 10); 9 | 10 | if (Number.isNaN(port)) { return val; } 11 | if (port >= 0) { return port; } 12 | 13 | return false; 14 | } 15 | 16 | const port = normalizePort(process.env.PORT || process.env.APPLICATION_PORT || '3310'); 17 | app.set('port', port); 18 | 19 | const server = http.createServer(app); 20 | server.listen(port); 21 | 22 | function onError(error) { 23 | if (error.syscall !== 'listen') { 24 | throw error; 25 | } 26 | 27 | const bind = typeof port === 'string' 28 | ? 
`Pipe ${port}` 29 | : `Port ${port}`; 30 | 31 | switch (error.code) { 32 | case 'EACCES': 33 | console.error(`${bind} requires elevated privileges`); 34 | process.exit(1); 35 | break; 36 | case 'EADDRINUSE': 37 | console.error(`${bind} is already in use`); 38 | process.exit(1); 39 | break; 40 | default: 41 | throw error; 42 | } 43 | } 44 | 45 | function onListening() { 46 | const addr = server.address(); 47 | const bind = typeof addr === 'string' 48 | ? `pipe ${addr}` 49 | : `port ${addr.port}`; 50 | debug(`Listening on ${bind}`); 51 | 52 | console.log(chalk.cyan(`Your application is listening on ${bind}.`)); 53 | } 54 | 55 | server.on('error', onError); 56 | server.on('listening', onListening); 57 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/complex-model-with-a-view.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "cousin", 6 | "type": "Mongoose.Schema.Types.ObjectId", 7 | "ref": "persons" 8 | }, 9 | { 10 | "name": "dad", 11 | "type": "Mongoose.Schema.Types.ObjectId", 12 | "ref": "persons" 13 | }, 14 | { 15 | "name": "name", 16 | "type": "String" 17 | }, 18 | { 19 | "name": "preferredFilm", 20 | "type": "Mongoose.Schema.Types.ObjectId" 21 | }, 22 | { 23 | "name": "son", 24 | "type": "Mongoose.Schema.Types.ObjectId", 25 | "ref": "persons" 26 | } 27 | ], 28 | "references": [], 29 | "primaryKeys": [ 30 | "_id" 31 | ], 32 | "options": { 33 | "timestamps": false 34 | } 35 | }, 36 | "films": { 37 | "fields": [ 38 | { 39 | "name": "author", 40 | "type": "Mongoose.Schema.Types.ObjectId", 41 | "ref": "persons" 42 | }, 43 | { 44 | "name": "bestActor", 45 | "type": "Mongoose.Schema.Types.ObjectId", 46 | "ref": "persons" 47 | }, 48 | { 49 | "name": "title", 50 | "type": "String" 51 | } 52 | ], 53 | "references": [], 54 | "primaryKeys": [ 55 | "_id" 56 | ], 57 | "options": { 58 | "timestamps": false 59 | } 60 | }, 61 | "myView": { 62 | "fields": [], 63 | "references": [], 64 | "primaryKeys": [ 65 | "_id" 66 | ], 67 | "options": { 68 | "timestamps": false 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/deep-nested-fields.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | }, 8 | { 9 | "name": "very", 10 | "type": { 11 | "deep": { 12 | "model": { 13 | "arrayOfNumber": [ 14 | "Number" 15 | ], 16 | "arrayMixed": [ 17 | "Object" 18 | ], 19 | "arrayOfObjectIds": [ 20 | "Mongoose.Schema.Types.ObjectId" 21 | ], 22 | "arrayWithComplexObject": [ 23 | { 24 | "_id": "Mongoose.Schema.Types.ObjectId", 25 | "name": "String", 26 | "propGroup": { 27 | "answer": "Boolean", 28 | "date": "Date", 29 | "sentence": "String", 30 | "number": "Number" 31 | } 32 | } 33 | ], 34 | "arrayOfComplexObjects": [ 35 | { 36 | "_id": "Mongoose.Schema.Types.ObjectId", 37 | "propGroup": { 38 | "answer": "Boolean", 39 | "date": "Date", 40 | "sentence": "String", 41 | "number": "Number" 42 | }, 43 | "so": { 44 | "nested": { 45 | "arrayMixed": [ 46 | "Object" 47 | ], 48 | "arrayOfNumber": [ 49 | "Number" 50 | ] 51 | } 52 | } 53 | } 54 | ] 55 | } 56 | } 57 | } 58 | } 59 | ], 60 | "options": { 61 | "timestamps": false 62 | }, 63 | "primaryKeys": [ 64 | "_id" 65 | ], 66 | "references": [] 67 | } 68 | } 69 | 
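The JSON files above are stored snapshots of the Mongo analyzer's expected output. As a quick illustration, the standalone Node sketch below loads one of them and checks the shape every collection entry carries in these snapshots (fields, references, primaryKeys, options); the relative path and the idea of running this as a loose script are assumptions for illustration only, not part of the repository's test suite.

const path = require('path');

// Assumed location: this sketch is presumed to live one directory below the repository root.
const expected = require(path.join(
  __dirname,
  '../test-expected/mongo/db-analysis-output/deep-nested-fields.expected.json',
));

// Every collection entry in these snapshots carries the same four keys.
Object.entries(expected).forEach(([collectionName, model]) => {
  ['fields', 'references', 'primaryKeys', 'options'].forEach((key) => {
    if (!(key in model)) {
      throw new Error(`"${collectionName}" is missing the "${key}" key`);
    }
  });
  console.log(`${collectionName}: ${model.fields.length} field(s), primary keys: ${model.primaryKeys.join(', ')}`);
});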
-------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/hasmany.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "films": { 3 | "fields": [ 4 | { 5 | "hasMany": true, 6 | "name": "actors", 7 | "ref": "persons", 8 | "type": "[Mongoose.Schema.Types.ObjectId]" 9 | }, 10 | { 11 | "name": "author", 12 | "ref": "persons", 13 | "type": "Mongoose.Schema.Types.ObjectId" 14 | }, 15 | { 16 | "name": "title", 17 | "type": "String" 18 | } 19 | ], 20 | "options": { 21 | "timestamps": false 22 | }, 23 | "primaryKeys": [ 24 | "_id" 25 | ], 26 | "references": [] 27 | }, 28 | "persons": { 29 | "fields": [ 30 | { 31 | "name": "name", 32 | "type": "String" 33 | } 34 | ], 35 | "options": { 36 | "timestamps": false 37 | }, 38 | "primaryKeys": [ 39 | "_id" 40 | ], 41 | "references": [] 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/many-nulls.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "films": { 3 | "fields": [ 4 | { 5 | "name": "author", 6 | "ref": "persons", 7 | "type": "Mongoose.Schema.Types.ObjectId" 8 | }, 9 | { 10 | "name": "title", 11 | "type": "String" 12 | } 13 | ], 14 | "options": { 15 | "timestamps": false 16 | }, 17 | "primaryKeys": [ 18 | "_id" 19 | ], 20 | "references": [] 21 | }, 22 | "persons": { 23 | "fields": [ 24 | { 25 | "name": "name", 26 | "type": "String" 27 | } 28 | ], 29 | "options": { 30 | "timestamps": false 31 | }, 32 | "primaryKeys": [ 33 | "_id" 34 | ], 35 | "references": [] 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/many-objectid-fields.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "films": { 3 | "fields": [ 4 | { 5 | "name": "author", 6 | "ref": "persons", 7 | "type": "Mongoose.Schema.Types.ObjectId" 8 | }, 9 | { 10 | "name": "bestActor", 11 | "ref": "persons", 12 | "type": "Mongoose.Schema.Types.ObjectId" 13 | }, 14 | { 15 | "name": "title", 16 | "type": "String" 17 | } 18 | ], 19 | "options": { 20 | "timestamps": false 21 | }, 22 | "primaryKeys": [ 23 | "_id" 24 | ], 25 | "references": [] 26 | }, 27 | "persons": { 28 | "fields": [ 29 | { 30 | "name": "cousin", 31 | "ref": "persons", 32 | "type": "Mongoose.Schema.Types.ObjectId" 33 | }, 34 | { 35 | "name": "dad", 36 | "ref": "persons", 37 | "type": "Mongoose.Schema.Types.ObjectId" 38 | }, 39 | { 40 | "name": "name", 41 | "type": "String" 42 | }, 43 | { 44 | "name": "preferredFilm", 45 | "ref": "films", 46 | "type": "Mongoose.Schema.Types.ObjectId" 47 | }, 48 | { 49 | "name": "son", 50 | "ref": "persons", 51 | "type": "Mongoose.Schema.Types.ObjectId" 52 | } 53 | ], 54 | "options": { 55 | "timestamps": false 56 | }, 57 | "primaryKeys": [ 58 | "_id" 59 | ], 60 | "references": [] 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/multiple-nested-array-of-objects-fields.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | }, 8 | { 9 | "name": "propArrayOfObjects", 10 | "type": [ 11 | { 12 | "_id": "Mongoose.Schema.Types.ObjectId", 13 | "one": "Object", 14 | "two": "String" 15 | } 16 | ] 17 
| } 18 | ], 19 | "options": { 20 | "timestamps": false 21 | }, 22 | "primaryKeys": [ 23 | "_id" 24 | ], 25 | "references": [] 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/multiple-references-from-same-field.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "actors": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | } 8 | ], 9 | "options": { 10 | "timestamps": false 11 | }, 12 | "primaryKeys": [ 13 | "_id" 14 | ], 15 | "references": [] 16 | }, 17 | "films": { 18 | "fields": [ 19 | { 20 | "name": "author", 21 | "type": "Mongoose.Schema.Types.ObjectId" 22 | }, 23 | { 24 | "name": "refersTo", 25 | "type": "String" 26 | }, 27 | { 28 | "name": "title", 29 | "type": "String" 30 | } 31 | ], 32 | "options": { 33 | "timestamps": false 34 | }, 35 | "primaryKeys": [ 36 | "_id" 37 | ], 38 | "references": [] 39 | }, 40 | "persons": { 41 | "fields": [ 42 | { 43 | "name": "name", 44 | "type": "String" 45 | } 46 | ], 47 | "options": { 48 | "timestamps": false 49 | }, 50 | "primaryKeys": [ 51 | "_id" 52 | ], 53 | "references": [] 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/nested-array-of-numbers-fields.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | }, 8 | { 9 | "name": "propArrayOfNumbers", 10 | "type": [ 11 | "Number" 12 | ] 13 | } 14 | ], 15 | "options": { 16 | "timestamps": false 17 | }, 18 | "primaryKeys": [ 19 | "_id" 20 | ], 21 | "references": [] 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/nested-array-of-objects-fields.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | }, 8 | { 9 | "name": "propArrayOfObjects", 10 | "type": [ 11 | { 12 | "_id": "Mongoose.Schema.Types.ObjectId", 13 | "one": "String", 14 | "two": "String" 15 | } 16 | ] 17 | } 18 | ], 19 | "options": { 20 | "timestamps": false 21 | }, 22 | "primaryKeys": [ 23 | "_id" 24 | ], 25 | "references": [] 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/nested-object-fields.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | }, 8 | { 9 | "name": "propGroup", 10 | "type": { 11 | "answer": "Boolean", 12 | "date": "Date", 13 | "number": "Number", 14 | "sentence": "String" 15 | } 16 | } 17 | ], 18 | "options": { 19 | "timestamps": false 20 | }, 21 | "primaryKeys": [ 22 | "_id" 23 | ], 24 | "references": [] 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/simple.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "films": { 3 | "fields": [ 4 | { 5 | "name": "author", 6 | "ref": "persons", 7 | "type": "Mongoose.Schema.Types.ObjectId" 8 | }, 9 | { 10 | "name": "title", 11 | "type": "String" 12 | } 13 | ], 14 | "options": { 15 | "timestamps": false 16 | }, 17 | 
"primaryKeys": [ 18 | "_id" 19 | ], 20 | "references": [] 21 | }, 22 | "persons": { 23 | "fields": [ 24 | { 25 | "name": "name", 26 | "type": "String" 27 | } 28 | ], 29 | "options": { 30 | "timestamps": false 31 | }, 32 | "primaryKeys": [ 33 | "_id" 34 | ], 35 | "references": [] 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/sub-document-not-using-ids.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | }, 8 | { 9 | "name": "propArrayOfObjects", 10 | "type": [ 11 | { 12 | "_id": false, 13 | "sampleValue": "String", 14 | "complex name": "String" 15 | } 16 | ] 17 | } 18 | ], 19 | "options": { 20 | "timestamps": false 21 | }, 22 | "primaryKeys": [ 23 | "_id" 24 | ], 25 | "references": [] 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/sub-document-using-ids.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | }, 8 | { 9 | "name": "propArrayOfObjects", 10 | "type": [ 11 | { 12 | "_id": "Mongoose.Schema.Types.ObjectId", 13 | "sampleValue": "String", 14 | "complex name": "String" 15 | } 16 | ] 17 | } 18 | ], 19 | "options": { 20 | "timestamps": false 21 | }, 22 | "primaryKeys": [ 23 | "_id" 24 | ], 25 | "references": [] 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/sub-documents-ambiguous-ids.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | }, 8 | { 9 | "name": "propArrayOfObjects", 10 | "type": [ 11 | { 12 | "_id": "ambiguous", 13 | "sampleValue": "String", 14 | "complex name": "String" 15 | } 16 | ] 17 | } 18 | ], 19 | "options": { 20 | "timestamps": false 21 | }, 22 | "primaryKeys": [ 23 | "_id" 24 | ], 25 | "references": [] 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/sub-documents-not-using-ids.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | }, 8 | { 9 | "name": "propArrayOfObjects", 10 | "type": [ 11 | { 12 | "_id": false, 13 | "sampleValue": "String", 14 | "complex name": "String" 15 | } 16 | ] 17 | } 18 | ], 19 | "options": { 20 | "timestamps": false 21 | }, 22 | "primaryKeys": [ 23 | "_id" 24 | ], 25 | "references": [] 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test-expected/mongo/db-analysis-output/sub-documents-using-ids.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "persons": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "type": "String" 7 | }, 8 | { 9 | "name": "propArrayOfObjects", 10 | "type": [ 11 | { 12 | "_id": "Mongoose.Schema.Types.ObjectId", 13 | "sampleValue": "String", 14 | "complex name": "String" 15 | } 16 | ] 17 | } 18 | ], 19 | "options": { 20 | "timestamps": false 21 | }, 22 | "primaryKeys": [ 23 | "_id" 24 | ], 25 | "references": [] 26 | } 27 | } 28 | 
-------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/databases.config.expected.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | const databaseOptions = { 4 | useNewUrlParser: true, 5 | useUnifiedTopology: true, 6 | }; 7 | 8 | module.exports = [{ 9 | name: 'default', 10 | modelsDir: path.resolve(__dirname, '../models'), 11 | connection: { 12 | url: process.env.DATABASE_URL, 13 | options: { ...databaseOptions }, 14 | }, 15 | }]; 16 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/deep-nested.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'name': String, 9 | 'very': { 10 | 'deep': { 11 | 'model': { 12 | 'arrayOfNumber': [Number], 13 | 'arrayMixed': [Object], 14 | 'arrayOfObjectIds': [Mongoose.Schema.Types.ObjectId], 15 | 'arrayWithComplexObject': [{ 16 | 'name': String, 17 | 'propGroup': { 18 | 'answer': Boolean, 19 | 'date': Date, 20 | 'sentence': String, 21 | 'number': Number, 22 | }, 23 | }], 24 | 'arrayOfComplexObjects': [{ 25 | 'propGroup': { 26 | 'answer': Boolean, 27 | 'date': Date, 28 | 'sentence': String, 29 | 'number': Number, 30 | }, 31 | 'so': { 32 | 'nested': { 33 | 'arrayMixed': [Object], 34 | 'arrayOfNumber': [Number], 35 | }, 36 | }, 37 | }], 38 | }, 39 | }, 40 | }, 41 | }, { 42 | timestamps: false, 43 | }); 44 | 45 | return mongoose.model('persons', schema, 'persons'); 46 | }; 47 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/hasmany.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 
6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'actors': { type: [Mongoose.Schema.Types.ObjectId], ref: 'persons' }, 9 | 'author': { type: Mongoose.Schema.Types.ObjectId, ref: 'persons' }, 10 | 'title': String, 11 | }, { 12 | timestamps: false, 13 | }); 14 | 15 | return mongoose.model('films', schema, 'films'); 16 | }; 17 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/index.expected.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | const Mongoose = require('mongoose'); 4 | 5 | const databasesConfiguration = require('../config/databases'); 6 | 7 | const connections = {}; 8 | const db = {}; 9 | 10 | databasesConfiguration.forEach((databaseInfo) => { 11 | const connection = Mongoose.createConnection(databaseInfo.connection.url, databaseInfo.connection.options); 12 | connections[databaseInfo.name] = connection; 13 | 14 | const modelsDir = databaseInfo.modelsDir || path.join(__dirname, databaseInfo.name); 15 | fs 16 | .readdirSync(modelsDir) 17 | .filter((file) => file.indexOf('.') !== 0 && file !== 'index.js') 18 | .forEach((file) => { 19 | try { 20 | const model = require(path.join(modelsDir, file))(connection, Mongoose); 21 | db[model.modelName] = model; 22 | } catch (error) { 23 | console.error(`Model creation error: ${error}`); 24 | } 25 | }); 26 | }); 27 | 28 | db.objectMapping = Mongoose; 29 | db.connections = connections; 30 | 31 | module.exports = db; 32 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/nested-array-of-numbers.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'name': String, 9 | 'propArrayOfNumbers': [Number], 10 | }, { 11 | timestamps: false, 12 | }); 13 | 14 | return mongoose.model('persons', schema, 'persons'); 15 | }; 16 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/nested-array-of-objects.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 
6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'name': String, 9 | 'propArrayOfObjects': [{ 10 | 'one': String, 11 | 'two': String, 12 | }], 13 | }, { 14 | timestamps: false, 15 | }); 16 | 17 | return mongoose.model('persons', schema, 'persons'); 18 | }; 19 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/nested-object.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'name': String, 9 | 'propGroup': { 10 | 'answer': Boolean, 11 | 'date': Date, 12 | 'number': Number, 13 | 'sentence': String, 14 | }, 15 | }, { 16 | timestamps: false, 17 | }); 18 | 19 | return mongoose.model('persons', schema, 'persons'); 20 | }; 21 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/simple.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'author': { type: Mongoose.Schema.Types.ObjectId, ref: 'persons' }, 9 | 'title': String, 10 | }, { 11 | timestamps: false, 12 | }); 13 | 14 | return mongoose.model('films', schema, 'films'); 15 | }; 16 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/sub-document-not-using-ids.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 
6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'name': String, 9 | 'propArrayOfObjects': [{ 10 | _id: false, 11 | 'sampleValue': String, 12 | 'complex name': String, 13 | }], 14 | }, { 15 | timestamps: false, 16 | }); 17 | 18 | return mongoose.model('persons', schema, 'persons'); 19 | }; 20 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/sub-document-using-ids.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'name': String, 9 | 'propArrayOfObjects': [{ 10 | 'sampleValue': String, 11 | 'complex name': String, 12 | }], 13 | }, { 14 | timestamps: false, 15 | }); 16 | 17 | return mongoose.model('persons', schema, 'persons'); 18 | }; 19 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/sub-documents-ambiguous-ids.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'name': String, 9 | 'propArrayOfObjects': [{ 10 | // _id: false, Ambiguous usage of _ids, we could not detect if subDocuments use _id or not. 11 | 'sampleValue': String, 12 | 'complex name': String, 13 | }], 14 | }, { 15 | timestamps: false, 16 | }); 17 | 18 | return mongoose.model('persons', schema, 'persons'); 19 | }; 20 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/sub-documents-not-using-ids.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 
6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'name': String, 9 | 'propArrayOfObjects': [{ 10 | _id: false, 11 | 'sampleValue': String, 12 | 'complex name': String, 13 | }], 14 | }, { 15 | timestamps: false, 16 | }); 17 | 18 | return mongoose.model('persons', schema, 'persons'); 19 | }; 20 | -------------------------------------------------------------------------------- /test-expected/mongo/dumper-output/sub-documents-using-ids.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | 4 | module.exports = (mongoose, Mongoose) => { 5 | // This section contains the properties of your model, mapped to your collection's properties. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const schema = Mongoose.Schema({ 8 | 'name': String, 9 | 'propArrayOfObjects': [{ 10 | 'sampleValue': String, 11 | 'complex name': String, 12 | }], 13 | }, { 14 | timestamps: false, 15 | }); 16 | 17 | return mongoose.model('persons', schema, 'persons'); 18 | }; 19 | -------------------------------------------------------------------------------- /test-expected/sequelize/constraints-getter-output/addresses.expected.js: -------------------------------------------------------------------------------- 1 | const mssql = [ 2 | { 3 | tableName: 'addresses', 4 | columnName: 'user_id', 5 | columnType: 'PRIMARY KEY', 6 | constraintName: 'addresses_pkey', 7 | foreignTableName: null, 8 | foreignColumnName: null, 9 | uniqueIndexes: [['city']], 10 | }, 11 | { 12 | tableName: 'addresses', 13 | columnName: 'user_id', 14 | columnType: 'FOREIGN KEY', 15 | constraintName: 'fk_user_id', 16 | foreignTableName: 'users', 17 | foreignColumnName: 'id', 18 | uniqueIndexes: [['city']], 19 | }, 20 | { 21 | tableName: 'addresses', 22 | columnName: 'city', 23 | columnType: 'UNIQUE', 24 | constraintName: 'unique_city', 25 | foreignColumnName: null, 26 | foreignTableName: null, 27 | uniqueIndexes: [['city']], 28 | }, 29 | ]; 30 | 31 | const mysql = [ 32 | { 33 | tableName: 'addresses', 34 | columnName: 'user_id', 35 | columnType: 'PRIMARY KEY', 36 | constraintName: 'PRIMARY', 37 | foreignTableName: null, 38 | foreignColumnName: null, 39 | uniqueIndexes: [['city']], 40 | }, 41 | { 42 | tableName: 'addresses', 43 | columnName: 'user_id', 44 | columnType: 'FOREIGN KEY', 45 | constraintName: 'fk_user_id', 46 | foreignTableName: 'users', 47 | foreignColumnName: 'id', 48 | uniqueIndexes: [['city']], 49 | }, 50 | { 51 | tableName: 'addresses', 52 | columnName: 'city', 53 | columnType: 'UNIQUE', 54 | constraintName: 'unique_city', 55 | foreignColumnName: null, 56 | foreignTableName: null, 57 | uniqueIndexes: [['city']], 58 | }, 59 | ]; 60 | 61 | const postgres = [ 62 | { 63 | tableName: 'addresses', 64 | columnName: 'city', 65 | columnType: 'UNIQUE', 66 | constraintName: 'unique_city', 67 | foreignTableName: 'addresses', 68 | foreignColumnName: 'city', 69 | uniqueIndexes: [['city']], 70 | }, 71 | { 72 | tableName: 'addresses', 73 | columnName: 'user_id', 74 | columnType: 'PRIMARY KEY', 75 | constraintName: 'addresses_pkey', 76 | foreignTableName: 'addresses', 77 | 
foreignColumnName: 'user_id', 78 | uniqueIndexes: [['city']], 79 | }, 80 | { 81 | tableName: 'addresses', 82 | columnName: 'user_id', 83 | columnType: 'FOREIGN KEY', 84 | constraintName: 'fk_user_id', 85 | foreignTableName: 'users', 86 | foreignColumnName: 'id', 87 | uniqueIndexes: [['city']], 88 | }, 89 | ]; 90 | 91 | module.exports = { mssql, mysql, postgres }; 92 | -------------------------------------------------------------------------------- /test-expected/sequelize/constraints-getter-output/customers.expected.js: -------------------------------------------------------------------------------- 1 | const mssql = [ 2 | { 3 | tableName: 'customers', 4 | columnName: 'id', 5 | columnType: 'PRIMARY KEY', 6 | constraintName: 'pk_customers', 7 | foreignTableName: null, 8 | foreignColumnName: null, 9 | uniqueIndexes: null, 10 | }, 11 | ]; 12 | 13 | const mysql = [ 14 | { 15 | tableName: 'customers', 16 | columnName: 'id', 17 | columnType: 'PRIMARY KEY', 18 | constraintName: 'PRIMARY', 19 | foreignTableName: null, 20 | foreignColumnName: null, 21 | uniqueIndexes: null, 22 | }, 23 | ]; 24 | 25 | const postgres = [ 26 | { 27 | tableName: 'customers', 28 | columnName: 'id', 29 | columnType: 'PRIMARY KEY', 30 | constraintName: 'customers_pk', 31 | foreignTableName: 'customers', 32 | foreignColumnName: 'id', 33 | uniqueIndexes: null, 34 | }, 35 | ]; 36 | 37 | module.exports = { mssql, mysql, postgres }; 38 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/addresses.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "addresses": { 3 | "fields": [ 4 | { 5 | "name": "userId", 6 | "nameColumn": "user_id", 7 | "type": "INTEGER", 8 | "primaryKey": true, 9 | "defaultValue": null, 10 | "isRequired": true 11 | }, 12 | { 13 | "name": "street", 14 | "nameColumn": "street", 15 | "type": "STRING", 16 | "primaryKey": false, 17 | "defaultValue": null, 18 | "isRequired": true 19 | }, 20 | { 21 | "name": "city", 22 | "nameColumn": "city", 23 | "type": "STRING", 24 | "primaryKey": false, 25 | "defaultValue": null, 26 | "isRequired": true 27 | }, 28 | { 29 | "name": "state", 30 | "nameColumn": "state", 31 | "type": "STRING", 32 | "primaryKey": false, 33 | "defaultValue": null, 34 | "isRequired": true 35 | } 36 | ], 37 | "references": [ 38 | { 39 | "association": "belongsTo", 40 | "ref": "users", 41 | "foreignKey": "user_id", 42 | "foreignKeyName": "userIdKey", 43 | "as": "user" 44 | } 45 | ], 46 | "primaryKeys": ["user_id"], 47 | "options": { 48 | "hasIdColumn": false, 49 | "hasPrimaryKeys": true, 50 | "isJunction": false, 51 | "timestamps": false, 52 | "underscored": true 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/customers.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "customers": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "nameColumn": "name", 7 | "type": "STRING", 8 | "primaryKey": false, 9 | "defaultValue": null, 10 | "isRequired": true 11 | }, 12 | { 13 | "name": "description", 14 | "nameColumn": "description", 15 | "type": "STRING", 16 | "primaryKey": false, 17 | "defaultValue": null, 18 | "isRequired": false 19 | }, 20 | { 21 | "name": "isActive", 22 | "nameColumn": "is_active", 23 | "type": "BOOLEAN", 24 | "primaryKey": false, 25 | "defaultValue": true, 26 | "isRequired": true 27 | }, 28 | { 29 | 
"nameColumn": "paying", 30 | "defaultValue": false, 31 | "name": "paying", 32 | "primaryKey": false, 33 | "type": "BOOLEAN", 34 | "isRequired": true 35 | }, 36 | { 37 | "name": "createdAt", 38 | "nameColumn": "created_at", 39 | "type": "DATE", 40 | "primaryKey": false, 41 | "defaultValue": null, 42 | "isRequired": false 43 | }, 44 | { 45 | "name": "updatedAt", 46 | "nameColumn": "updated_at", 47 | "type": "DATE", 48 | "primaryKey": false, 49 | "defaultValue": null, 50 | "isRequired": false 51 | } 52 | ], 53 | "references": [], 54 | "primaryKeys": ["id"], 55 | "options": { 56 | "hasIdColumn": true, 57 | "hasPrimaryKeys": true, 58 | "isJunction": false, 59 | "timestamps": true, 60 | "underscored": true 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/default_values.mssql.expected.js: -------------------------------------------------------------------------------- 1 | const Sequelize = require('sequelize'); 2 | 3 | module.exports = { 4 | default_values: { 5 | fields: [ 6 | { 7 | name: "boolNull", 8 | nameColumn: "bool_null", 9 | type: "BOOLEAN", 10 | primaryKey: false, 11 | defaultValue: null, 12 | isRequired: false, 13 | }, 14 | { 15 | name: "boolCst", 16 | nameColumn: "bool_cst", 17 | type: "BOOLEAN", 18 | primaryKey: false, 19 | defaultValue: true, 20 | isRequired: false, 21 | }, 22 | { 23 | name: "intCst", 24 | nameColumn: "int_cst", 25 | type: "INTEGER", 26 | primaryKey: false, 27 | defaultValue: 42, 28 | isRequired: false, 29 | }, 30 | { 31 | name: "strNull", 32 | nameColumn: "str_null", 33 | type: "STRING", 34 | primaryKey: false, 35 | defaultValue: null, 36 | isRequired: false, 37 | }, 38 | { 39 | name: "strCst", 40 | nameColumn: "str_cst", 41 | type: "STRING", 42 | primaryKey: false, 43 | defaultValue: 'co\'nst\'ant', 44 | isRequired: false, 45 | }, 46 | { 47 | name: "strExpr", 48 | nameColumn: "str_expr", 49 | type: "STRING", 50 | primaryKey: false, 51 | defaultValue: Sequelize.literal("upper(concat('Hello','World'))"), 52 | isRequired: false, 53 | }, 54 | { 55 | name: "dateNull", 56 | nameColumn: "date_null", 57 | type: "DATE", 58 | primaryKey: false, 59 | defaultValue: null, 60 | isRequired: false, 61 | }, 62 | { 63 | name: "dateCst1", 64 | nameColumn: "date_cst1", 65 | type: "DATE", 66 | primaryKey: false, 67 | defaultValue: '2015-05-11 13:01:01', 68 | isRequired: false, 69 | }, 70 | { 71 | name: "dateCst2", 72 | nameColumn: "date_cst2", 73 | type: "DATE", 74 | primaryKey: false, 75 | defaultValue: '1983-05-27', 76 | isRequired: false, 77 | }, 78 | { 79 | name: "dateExpr1", 80 | nameColumn: "date_expr1", 81 | type: "DATE", 82 | primaryKey: false, 83 | defaultValue: Sequelize.literal("getutcdate()"), 84 | isRequired: false, 85 | }, 86 | { 87 | name: "dateExpr2", 88 | nameColumn: "date_expr2", 89 | type: "DATE", 90 | primaryKey: false, 91 | defaultValue: Sequelize.literal("getdate()"), 92 | isRequired: false, 93 | } 94 | ], 95 | primaryKeys: [ 96 | "id", 97 | ], 98 | options: { 99 | underscored: false, 100 | timestamps: false, 101 | hasIdColumn: true, 102 | hasPrimaryKeys: true, 103 | isJunction: false, 104 | }, 105 | references: [ 106 | ], 107 | } 108 | }; 109 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/doubleref.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "doubleref": { 3 | "referencesLength": 2 4 | } 5 | } 6 | 
-------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/duplicatedalias.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "project": { 3 | "referencesLength": 4 4 | }, 5 | 6 | "joinroles": { 7 | "referencesLength": 2 8 | }, 9 | 10 | "roles": { 11 | "referencesLength": 2 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/export.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "export": { 3 | "fields": [ 4 | { 5 | "name": "value", 6 | "nameColumn": "value", 7 | "type": "STRING", 8 | "primaryKey": false, 9 | "defaultValue": null 10 | } 11 | ], 12 | "references": [ 13 | ], 14 | "primaryKeys": ["id"], 15 | "options": { 16 | "hasIdColumn": true, 17 | "hasPrimaryKeys": true, 18 | "isJunction": false, 19 | "timestamps": false, 20 | "underscored": true 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/only-foreign-keys-and-id.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "only_foreign_keys_and_id": { 3 | "fields":[ 4 | { 5 | "name":"id", 6 | "nameColumn":"id", 7 | "type":"INTEGER", 8 | "primaryKey":true, 9 | "defaultValue":null, 10 | "isRequired":true 11 | } 12 | ], 13 | "primaryKeys":["id"], 14 | "options":{ 15 | "underscored":true, 16 | "timestamps":false, 17 | "hasIdColumn":true, 18 | "hasPrimaryKeys":true, 19 | "isJunction":true 20 | }, 21 | "references":[ 22 | { 23 | "foreignKey":"sample_id", 24 | "foreignKeyName":"sampleIdKey", 25 | "association":"belongsTo", 26 | "ref":"sample_table", 27 | "as":"sample" 28 | }, 29 | { 30 | "foreignKey":"car_id", 31 | "foreignKeyName":"carIdKey", 32 | "association":"belongsTo", 33 | "ref":"cars", 34 | "as":"car" 35 | } 36 | ] 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/owners.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "owners": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "nameColumn": "name", 7 | "type": "STRING", 8 | "primaryKey": false, 9 | "defaultValue": null, 10 | "isRequired": false 11 | }, 12 | { 13 | "name": "ownerId", 14 | "nameColumn": "owner_id", 15 | "type": "INTEGER", 16 | "primaryKey": false, 17 | "defaultValue": null, 18 | "isRequired": true 19 | } 20 | ], 21 | "primaryKeys": [ 22 | "id" 23 | ], 24 | "options": { 25 | "underscored": true, 26 | "timestamps": false, 27 | "hasIdColumn": true, 28 | "hasPrimaryKeys": true, 29 | "isJunction": false 30 | }, 31 | "references": [ 32 | { 33 | "foreignKey": "owner_id", 34 | "foreignKeyName": "ownerIdKey", 35 | "association": "hasMany", 36 | "ref": "projects", 37 | "sourceKey": "owner_id", 38 | "as": "projects" 39 | } 40 | ] 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/parenthesis.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "parenthesis": { 3 | "fields": [ 4 | { 5 | "name": "id", 6 | "nameColumn": "id", 7 | "type": "INTEGER", 8 | "primaryKey": true, 9 | "defaultValue": null, 10 | "isRequired": true 11 | }, 12 | { 13 | "name": "Ingredients (Kcal/100g)", 14 | 
"nameColumn": "Ingredients (Kcal/100g)", 15 | "type": "STRING", 16 | "primaryKey": false, 17 | "defaultValue": null, 18 | "isRequired": false 19 | } 20 | ], 21 | "references": [], 22 | "primaryKeys": ["id"], 23 | "options": { 24 | "hasIdColumn": false, 25 | "hasPrimaryKeys": true, 26 | "isJunction": false, 27 | "timestamps": false, 28 | "underscored": false 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/parenthesis_underscored.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "parenthesis_underscored": { 3 | "fields": [ 4 | { 5 | "name": "id", 6 | "nameColumn": "id", 7 | "type": "INTEGER", 8 | "primaryKey": true, 9 | "defaultValue": null, 10 | "isRequired": true 11 | }, 12 | { 13 | "name": "Ingredients (Kcal/100g)", 14 | "nameColumn": "Ingredients (Kcal/100g)", 15 | "type": "STRING", 16 | "primaryKey": false, 17 | "defaultValue": null, 18 | "isRequired": false 19 | }, 20 | { 21 | "name": "ingredientWeight", 22 | "nameColumn": "ingredient_weight", 23 | "type": "INTEGER", 24 | "primaryKey": false, 25 | "defaultValue": null, 26 | "isRequired": true 27 | } 28 | ], 29 | "references": [], 30 | "primaryKeys": ["id"], 31 | "options": { 32 | "hasIdColumn": false, 33 | "hasPrimaryKeys": true, 34 | "isJunction": false, 35 | "timestamps": false, 36 | "underscored": false 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/parenthesis_underscored_true.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "parenthesis_underscored_true": { 3 | "fields": [ 4 | { 5 | "name": "id", 6 | "nameColumn": "id", 7 | "type": "INTEGER", 8 | "primaryKey": true, 9 | "defaultValue": null, 10 | "isRequired": true 11 | }, 12 | { 13 | "name": "Ingredients (Kcal/100g)", 14 | "nameColumn": "Ingredients (Kcal/100g)", 15 | "type": "STRING", 16 | "primaryKey": false, 17 | "defaultValue": null, 18 | "isRequired": false 19 | }, 20 | { 21 | "name": "ingredientWeight", 22 | "nameColumn": "ingredient_weight", 23 | "type": "INTEGER", 24 | "primaryKey": false, 25 | "defaultValue": null, 26 | "isRequired": true 27 | } 28 | ], 29 | "references": [], 30 | "primaryKeys": ["id"], 31 | "options": { 32 | "hasIdColumn": false, 33 | "hasPrimaryKeys": true, 34 | "isJunction": false, 35 | "timestamps": false, 36 | "underscored": true 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/projects.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "projects": { 3 | "fields": [ 4 | { 5 | "name": "name", 6 | "nameColumn": "name", 7 | "type": "STRING", 8 | "primaryKey": false, 9 | "defaultValue": null, 10 | "isRequired": false 11 | } 12 | ], 13 | "primaryKeys": [ 14 | "id" 15 | ], 16 | "options": { 17 | "underscored": false, 18 | "timestamps": false, 19 | "hasIdColumn": true, 20 | "hasPrimaryKeys": true, 21 | "isJunction": false 22 | }, 23 | "references": [ 24 | { 25 | "foreignKey": "owner_id", 26 | "foreignKeyName": "ownerIdKey", 27 | "association": "belongsTo", 28 | "ref": "owners", 29 | "as": "owner", 30 | "targetKey": "owner_id" 31 | } 32 | ] 33 | } 34 | } 35 | -------------------------------------------------------------------------------- 
/test-expected/sequelize/db-analysis-output/renderings.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "renderings": { 3 | "fields": [ 4 | { 5 | "name": "createdAt", 6 | "nameColumn": "createdAt", 7 | "type": "DATE", 8 | "primaryKey": false, 9 | "defaultValue": null 10 | }, 11 | { 12 | "name": "updatedAt", 13 | "nameColumn": "updatedAt", 14 | "type": "DATE", 15 | "primaryKey": false, 16 | "defaultValue": null 17 | }, 18 | { 19 | "name": "deletedAt", 20 | "nameColumn": "deletedAt", 21 | "type": "DATE", 22 | "primaryKey": false, 23 | "defaultValue": null 24 | }, 25 | { 26 | "name": "testEnum", 27 | "nameColumn": "testEnum", 28 | "type": "ENUM('LEFT','RIGHT')" 29 | }, 30 | { 31 | "name": "cacheVersion", 32 | "nameColumn": "cacheVersion", 33 | "type": "STRING", 34 | "primaryKey": false, 35 | "defaultValue": null 36 | }, 37 | { 38 | "name": "sections", 39 | "nameColumn": "sections", 40 | "type": "JSONB", 41 | "primaryKey": false, 42 | "defaultValueBak": "Sequelize.literal('[{\"name\": \"Dashboard\", \"isVisible\": true}, {\"name\": \"Data\", \"isVisible\": true}]')", 43 | "defaultValue": [{"name": "Dashboard", "isVisible": true}, {"name": "Data", "isVisible": true}] 44 | }, 45 | { 46 | "name": "collectionsPositions", 47 | "nameColumn": "collectionsPositions", 48 | "type": "ARRAY(DataTypes.INTEGER)", 49 | "primaryKey": false, 50 | "defaultValueBak": "Sequelize.literal('ARRAY[]')", 51 | "defaultValue": [] 52 | } 53 | ], 54 | "references": [ 55 | { 56 | "association": "belongsTo", 57 | "ref": "environments", 58 | "foreignKey": "environmentId", 59 | "foreignKeyName": "environmentIdUnconventionnalKey", 60 | "as": "environment" 61 | }, 62 | { 63 | "association": "belongsTo", 64 | "ref": "teams", 65 | "foreignKey": "teamId", 66 | "foreignKeyName": "teamIdKey", 67 | "as": "team" 68 | }, 69 | { 70 | "association": "belongsTo", 71 | "ref": "otherWithTarget", 72 | "foreignKey": "other", 73 | "foreignKeyName": "otherKey", 74 | "targetKey": "otherId", 75 | "as": "team2" 76 | }, 77 | { 78 | "association": "belongsTo", 79 | "ref": "film", 80 | "foreignKey": "filmKeyName", 81 | "foreignKeyName": "filmKeyNameKey", 82 | "targetKey": "film_target_key", 83 | "as": "films" 84 | } 85 | ], 86 | "primaryKeys": ["id"], 87 | "options": { 88 | "hasIdColumn": true, 89 | "hasPrimaryKeys": true, 90 | "isJunction": false, 91 | "timestamps": true, 92 | "underscored": false 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/rentals.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "rentals": { 3 | "fields": [ 4 | { 5 | "name": "carNo", 6 | "nameColumn": "car_no", 7 | "type": "INTEGER", 8 | "primaryKey": true, 9 | "defaultValue": null, 10 | "isRequired": true 11 | }, 12 | { 13 | "name": "fromDate", 14 | "nameColumn": "from_date", 15 | "type": "DATEONLY", 16 | "primaryKey": false, 17 | "defaultValue": null, 18 | "isRequired": true 19 | }, 20 | { 21 | "name": "toDate", 22 | "nameColumn": "to_date", 23 | "defaultValue": null, 24 | "primaryKey": false, 25 | "type": "DATEONLY", 26 | "isRequired": true 27 | } 28 | ], 29 | "references": [ 30 | { 31 | "as": "linkedCarNo", 32 | "association": "belongsTo", 33 | "foreignKey": "car_no", 34 | "foreignKeyName": "carNoKey", 35 | "ref": "cars" 36 | } 37 | ], 38 | "primaryKeys": ["car_no"], 39 | "options": { 40 | "hasIdColumn": false, 41 | "hasPrimaryKeys": true, 42 | "isJunction": 
false, 43 | "timestamps": false, 44 | "underscored": true 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /test-expected/sequelize/db-analysis-output/users.expected.json: -------------------------------------------------------------------------------- 1 | { 2 | "users": { 3 | "fields": [ 4 | { 5 | "name": "username", 6 | "nameColumn": "username", 7 | "type": "STRING", 8 | "primaryKey": false, 9 | "defaultValue": null, 10 | "isRequired": true 11 | }, 12 | { 13 | "name": "enabled", 14 | "nameColumn": "enabled", 15 | "type": "BOOLEAN", 16 | "primaryKey": false, 17 | "defaultValue": true, 18 | "isRequired": false 19 | }, 20 | { 21 | "name": "lastLogin", 22 | "nameColumn": "last_login", 23 | "type": "DATE", 24 | "primaryKey": false, 25 | "defaultValue": null, 26 | "isRequired": true 27 | } 28 | ], 29 | "references": [ 30 | { 31 | "as": "booksThroughUserBooks", 32 | "association": "belongsToMany", 33 | "ref": "books", 34 | "foreignKey": "user_id", 35 | "foreignKeyName": "userIdKey", 36 | "otherKey": "book_id", 37 | "through": "userBooks" 38 | }, 39 | { 40 | "as": "reviews", 41 | "association": "hasMany", 42 | "ref": "reviews", 43 | "foreignKey": "user_id", 44 | "foreignKeyName": "userIdKey" 45 | }, 46 | { 47 | "as": "address", 48 | "association": "hasOne", 49 | "ref": "addresses", 50 | "foreignKey": "user_id", 51 | "foreignKeyName": "userIdKey" 52 | } 53 | ], 54 | "primaryKeys": ["id"], 55 | "options": { 56 | "hasIdColumn": true, 57 | "hasPrimaryKeys": true, 58 | "isJunction": false, 59 | "timestamps": false, 60 | "underscored": true 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/addresses.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const Addresses = sequelize.define('addresses', { 8 | userId: { 9 | type: DataTypes.INTEGER, 10 | primaryKey: true, 11 | allowNull: false, 12 | }, 13 | street: { 14 | type: DataTypes.STRING, 15 | allowNull: false, 16 | }, 17 | city: { 18 | type: DataTypes.STRING, 19 | allowNull: false, 20 | }, 21 | state: { 22 | type: DataTypes.STRING, 23 | allowNull: false, 24 | }, 25 | }, { 26 | tableName: 'addresses', 27 | underscored: true, 28 | timestamps: false, 29 | schema: process.env.DATABASE_SCHEMA, 30 | }); 31 | 32 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 
33 | Addresses.associate = (models) => { 34 | Addresses.belongsTo(models.users, { 35 | foreignKey: { 36 | name: 'userIdKey', 37 | field: 'user_id', 38 | }, 39 | as: 'user', 40 | }); 41 | }; 42 | 43 | return Addresses; 44 | }; 45 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/customers.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const Customers = sequelize.define('customers', { 8 | name: { 9 | type: DataTypes.STRING, 10 | allowNull: false, 11 | }, 12 | description: { 13 | type: DataTypes.STRING, 14 | }, 15 | isActive: { 16 | type: DataTypes.BOOLEAN, 17 | defaultValue: true, 18 | allowNull: false, 19 | }, 20 | paying: { 21 | type: DataTypes.BOOLEAN, 22 | defaultValue: false, 23 | allowNull: false, 24 | }, 25 | createdAt: { 26 | type: DataTypes.DATE, 27 | }, 28 | updatedAt: { 29 | type: DataTypes.DATE, 30 | }, 31 | }, { 32 | tableName: 'customers', 33 | underscored: true, 34 | schema: process.env.DATABASE_SCHEMA, 35 | }); 36 | 37 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 38 | Customers.associate = (models) => { 39 | }; 40 | 41 | return Customers; 42 | }; 43 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/databases.config.expected.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | const databaseOptions = { 4 | logging: !process.env.NODE_ENV || process.env.NODE_ENV === 'development' ? console.log : false, 5 | pool: { maxConnections: 10, minConnections: 1 }, 6 | dialectOptions: {}, 7 | }; 8 | 9 | if (process.env.DATABASE_SSL && JSON.parse(process.env.DATABASE_SSL.toLowerCase())) { 10 | const rejectUnauthorized = process.env.DATABASE_REJECT_UNAUTHORIZED; 11 | if (rejectUnauthorized && (JSON.parse(rejectUnauthorized.toLowerCase()) === false)) { 12 | databaseOptions.dialectOptions.ssl = { rejectUnauthorized: false }; 13 | } else { 14 | databaseOptions.dialectOptions.ssl = true; 15 | } 16 | } 17 | 18 | module.exports = [{ 19 | name: 'default', 20 | modelsDir: path.resolve(__dirname, '../models'), 21 | connection: { 22 | url: process.env.DATABASE_URL, 23 | options: { ...databaseOptions }, 24 | }, 25 | }]; 26 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/env.darwin.expected: -------------------------------------------------------------------------------- 1 | APPLICATION_PORT=1654 2 | APPLICATION_URL=http://localhost:1654 3 | 4 | CORS_ORIGINS= 5 | 6 | DATABASE_URL=postgres://localhost:27017 7 | DOCKER_DATABASE_URL=postgres://host.docker.internal:27017 8 | DATABASE_SCHEMA=public 9 | DATABASE_SSL=false 10 | # This should be removed in production environment. 
11 | DATABASE_REJECT_UNAUTHORIZED=false 12 | 13 | FOREST_AUTH_SECRET= 14 | FOREST_ENV_SECRET= 15 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/env.linux.expected: -------------------------------------------------------------------------------- 1 | APPLICATION_PORT=1654 2 | APPLICATION_URL=http://localhost:1654 3 | 4 | CORS_ORIGINS= 5 | 6 | DATABASE_URL=postgres://localhost:27017 7 | DATABASE_SCHEMA=public 8 | DATABASE_SSL=false 9 | # This should be removed in production environment. 10 | DATABASE_REJECT_UNAUTHORIZED=false 11 | 12 | FOREST_AUTH_SECRET= 13 | FOREST_ENV_SECRET= 14 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/export.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const ModelExport = sequelize.define('modelExport', { 8 | value: { 9 | type: DataTypes.STRING, 10 | }, 11 | }, { 12 | tableName: 'export', 13 | underscored: true, 14 | timestamps: false, 15 | schema: process.env.DATABASE_SCHEMA, 16 | }); 17 | 18 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 19 | ModelExport.associate = (models) => { 20 | }; 21 | 22 | return ModelExport; 23 | }; 24 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/index.expected.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | const Sequelize = require('sequelize'); 4 | 5 | const databasesConfiguration = require('../config/databases'); 6 | 7 | const connections = {}; 8 | const db = {}; 9 | 10 | databasesConfiguration.forEach((databaseInfo) => { 11 | const connection = new Sequelize(databaseInfo.connection.url, databaseInfo.connection.options); 12 | connections[databaseInfo.name] = connection; 13 | 14 | const modelsDir = databaseInfo.modelsDir || path.join(__dirname, databaseInfo.name); 15 | fs 16 | .readdirSync(modelsDir) 17 | .filter((file) => file.indexOf('.') !== 0 && file !== 'index.js') 18 | .forEach((file) => { 19 | try { 20 | const model = connection.import(path.join(modelsDir, file)); 21 | db[model.name] = model; 22 | } catch (error) { 23 | console.error(`Model creation error: ${error}`); 24 | } 25 | }); 26 | }); 27 | 28 | Object.keys(db).forEach((modelName) => { 29 | if ('associate' in db[modelName]) { 30 | db[modelName].associate(db); 31 | } 32 | }); 33 | 34 | db.objectMapping = Sequelize; 35 | db.connections = connections; 36 | 37 | module.exports = db; 38 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/only-foreign-keys-and-id.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. 
However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const OnlyForeignKeysAndId = sequelize.define('onlyForeignKeysAndId', { 8 | id: { 9 | type: DataTypes.INTEGER, 10 | primaryKey: true, 11 | allowNull: false, 12 | }, 13 | }, { 14 | tableName: 'only_foreign_keys_and_id', 15 | underscored: true, 16 | timestamps: false, 17 | schema: process.env.DATABASE_SCHEMA, 18 | }); 19 | 20 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 21 | OnlyForeignKeysAndId.associate = (models) => { 22 | OnlyForeignKeysAndId.belongsTo(models.sampleTable, { 23 | foreignKey: { 24 | name: 'sampleIdKey', 25 | field: 'sample_id', 26 | }, 27 | as: 'sample', 28 | }); 29 | OnlyForeignKeysAndId.belongsTo(models.cars, { 30 | foreignKey: { 31 | name: 'carIdKey', 32 | field: 'car_id', 33 | }, 34 | as: 'car', 35 | }); 36 | }; 37 | 38 | return OnlyForeignKeysAndId; 39 | }; 40 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/owners.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const Owners = sequelize.define('owners', { 8 | name: { 9 | type: DataTypes.STRING, 10 | }, 11 | ownerId: { 12 | type: DataTypes.INTEGER, 13 | allowNull: false, 14 | }, 15 | }, { 16 | tableName: 'owners', 17 | underscored: true, 18 | timestamps: false, 19 | schema: process.env.DATABASE_SCHEMA, 20 | }); 21 | 22 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 23 | Owners.associate = (models) => { 24 | Owners.hasMany(models.projects, { 25 | foreignKey: { 26 | name: 'ownerIdKey', 27 | field: 'owner_id', 28 | }, 29 | sourceKey: 'ownerId', 30 | as: 'projects', 31 | }); 32 | }; 33 | 34 | return Owners; 35 | }; 36 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/parenthesis.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 
6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const Parenthesis = sequelize.define('parenthesis', { 8 | id: { 9 | type: DataTypes.INTEGER, 10 | primaryKey: true, 11 | allowNull: false, 12 | }, 13 | 'Ingredients (Kcal/100g)': { 14 | type: DataTypes.STRING, 15 | }, 16 | }, { 17 | tableName: 'parenthesis', 18 | timestamps: false, 19 | schema: process.env.DATABASE_SCHEMA, 20 | }); 21 | 22 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 23 | Parenthesis.associate = (models) => { 24 | }; 25 | 26 | return Parenthesis; 27 | }; 28 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/parenthesis_underscored.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const ParenthesisUnderscored = sequelize.define('parenthesisUnderscored', { 8 | id: { 9 | type: DataTypes.INTEGER, 10 | primaryKey: true, 11 | allowNull: false, 12 | }, 13 | 'Ingredients (Kcal/100g)': { 14 | type: DataTypes.STRING, 15 | }, 16 | ingredientWeight: { 17 | type: DataTypes.INTEGER, 18 | field: 'ingredient_weight', 19 | allowNull: false, 20 | }, 21 | }, { 22 | tableName: 'parenthesis_underscored', 23 | timestamps: false, 24 | schema: process.env.DATABASE_SCHEMA, 25 | }); 26 | 27 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 28 | ParenthesisUnderscored.associate = (models) => { 29 | }; 30 | 31 | return ParenthesisUnderscored; 32 | }; 33 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/parenthesis_underscored_true.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 
6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const ParenthesisUnderscoredTrue = sequelize.define('parenthesisUnderscoredTrue', { 8 | id: { 9 | type: DataTypes.INTEGER, 10 | primaryKey: true, 11 | allowNull: false, 12 | }, 13 | 'Ingredients (Kcal/100g)': { 14 | type: DataTypes.STRING, 15 | field: 'Ingredients (Kcal/100g)', 16 | }, 17 | ingredientWeight: { 18 | type: DataTypes.INTEGER, 19 | allowNull: false, 20 | }, 21 | }, { 22 | tableName: 'parenthesis_underscored_true', 23 | underscored: true, 24 | timestamps: false, 25 | schema: process.env.DATABASE_SCHEMA, 26 | }); 27 | 28 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 29 | ParenthesisUnderscoredTrue.associate = (models) => { 30 | }; 31 | 32 | return ParenthesisUnderscoredTrue; 33 | }; 34 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/projects.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const Projects = sequelize.define('projects', { 8 | name: { 9 | type: DataTypes.STRING, 10 | }, 11 | }, { 12 | tableName: 'projects', 13 | timestamps: false, 14 | schema: process.env.DATABASE_SCHEMA, 15 | }); 16 | 17 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 18 | Projects.associate = (models) => { 19 | Projects.belongsTo(models.owners, { 20 | foreignKey: { 21 | name: 'ownerIdKey', 22 | field: 'owner_id', 23 | }, 24 | targetKey: 'ownerId', 25 | as: 'owner', 26 | }); 27 | }; 28 | 29 | return Projects; 30 | }; 31 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/renderings.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 
6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const Renderings = sequelize.define('renderings', { 8 | createdAt: { 9 | type: DataTypes.DATE, 10 | }, 11 | updatedAt: { 12 | type: DataTypes.DATE, 13 | }, 14 | deletedAt: { 15 | type: DataTypes.DATE, 16 | }, 17 | testEnum: { 18 | type: DataTypes.ENUM('LEFT','RIGHT'), 19 | }, 20 | cacheVersion: { 21 | type: DataTypes.STRING, 22 | }, 23 | sections: { 24 | type: DataTypes.JSONB, 25 | defaultValue: [{"name":"Dashboard","isVisible":true},{"name":"Data","isVisible":true}], 26 | }, 27 | collectionsPositions: { 28 | type: DataTypes.ARRAY(DataTypes.INTEGER), 29 | defaultValue: [], 30 | }, 31 | }, { 32 | tableName: 'renderings', 33 | schema: process.env.DATABASE_SCHEMA, 34 | }); 35 | 36 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 37 | Renderings.associate = (models) => { 38 | Renderings.belongsTo(models.environments, { 39 | foreignKey: { 40 | name: 'environmentIdUnconventionnalKey', 41 | field: 'environmentId', 42 | }, 43 | as: 'environment', 44 | }); 45 | Renderings.belongsTo(models.teams, { 46 | foreignKey: { 47 | name: 'teamIdKey', 48 | field: 'teamId', 49 | }, 50 | as: 'team', 51 | }); 52 | Renderings.belongsTo(models.otherWithTarget, { 53 | foreignKey: { 54 | name: 'otherKey', 55 | field: 'other', 56 | }, 57 | targetKey: 'otherId', 58 | as: 'team2', 59 | }); 60 | Renderings.belongsTo(models.film, { 61 | foreignKey: { 62 | name: 'filmKeyNameKey', 63 | field: 'filmKeyName', 64 | }, 65 | targetKey: 'filmTargetKey', 66 | as: 'films', 67 | }); 68 | }; 69 | 70 | return Renderings; 71 | }; 72 | -------------------------------------------------------------------------------- /test-expected/sequelize/dumper-output/users.expected.js: -------------------------------------------------------------------------------- 1 | // This model was generated by Lumber. However, you remain in control of your models. 2 | // Learn how here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models 3 | module.exports = (sequelize, DataTypes) => { 4 | const { Sequelize } = sequelize; 5 | // This section contains the fields of your model, mapped to your table's columns. 6 | // Learn more here: https://docs.forestadmin.com/documentation/v/v6/reference-guide/models/enrich-your-models#declaring-a-new-field-in-a-model 7 | const Users = sequelize.define('users', { 8 | username: { 9 | type: DataTypes.STRING, 10 | allowNull: false, 11 | }, 12 | enabled: { 13 | type: DataTypes.BOOLEAN, 14 | defaultValue: true, 15 | }, 16 | lastLogin: { 17 | type: DataTypes.DATE, 18 | allowNull: false, 19 | }, 20 | }, { 21 | tableName: 'users', 22 | underscored: true, 23 | timestamps: false, 24 | schema: process.env.DATABASE_SCHEMA, 25 | }); 26 | 27 | // This section contains the relationships for this model. See: https://docs.forestadmin.com/documentation/v/v6/reference-guide/relationships#adding-relationships. 
28 | Users.associate = (models) => { 29 | Users.belongsToMany(models.books, { 30 | through: 'userBooks', 31 | foreignKey: 'user_id', 32 | otherKey: 'book_id', 33 | as: 'booksThroughUserBooks', 34 | }); 35 | Users.hasMany(models.reviews, { 36 | foreignKey: { 37 | name: 'userIdKey', 38 | field: 'user_id', 39 | }, 40 | as: 'reviews', 41 | }); 42 | Users.hasOne(models.addresses, { 43 | foreignKey: { 44 | name: 'userIdKey', 45 | field: 'user_id', 46 | }, 47 | as: 'address', 48 | }); 49 | }; 50 | 51 | return Users; 52 | }; 53 | -------------------------------------------------------------------------------- /test-fixtures/mongo/deep-nested-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | 3 | const persons = [ 4 | { 5 | _id: ObjectID(), 6 | name: 'James Cameron', 7 | very: { 8 | deep: { 9 | model: { 10 | arrayOfNumber: [1, 2, 3], 11 | arrayMixed: [1, 'two', true, new Date()], 12 | arrayOfObjectIds: [ 13 | ObjectID(), 14 | ObjectID(), 15 | ObjectID(), 16 | ], 17 | arrayWithComplexObject: [ 18 | { 19 | _id: ObjectID(), 20 | name: 'Françis', 21 | propGroup: { 22 | answer: false, 23 | date: new Date(), 24 | sentence: 'Life is beautiful', 25 | number: 1664, 26 | }, 27 | }, 28 | ], 29 | arrayOfComplexObjects: [ 30 | { 31 | _id: ObjectID(), 32 | propGroup: { 33 | answer: false, 34 | date: new Date(), 35 | }, 36 | so: { 37 | nested: { 38 | arrayOfNumber: [1, 2, 3], 39 | arrayMixed: [1, 'two', true, new Date()], 40 | }, 41 | }, 42 | }, 43 | { 44 | _id: ObjectID(), 45 | propGroup: { 46 | sentence: 'Life is beautiful', 47 | number: 1664, 48 | }, 49 | so: { 50 | nested: { 51 | arrayOfNumber: [1, 2, 3], 52 | arrayMixed: [1, 'two', true, new Date()], 53 | }, 54 | }, 55 | }, 56 | ], 57 | }, 58 | }, 59 | }, 60 | }, 61 | ]; 62 | 63 | module.exports = { persons }; 64 | -------------------------------------------------------------------------------- /test-fixtures/mongo/hasmany-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | const _ = require('lodash'); 3 | 4 | const persons = [ 5 | { 6 | _id: ObjectID(), 7 | name: 'James Cameron', 8 | }, 9 | { 10 | _id: ObjectID(), 11 | name: 'Sam Worthington', 12 | }, 13 | { 14 | _id: ObjectID(), 15 | name: 'Zoe Saldana', 16 | }, 17 | ]; 18 | 19 | const films = [ 20 | { 21 | _id: ObjectID(), 22 | title: 'Terminator', 23 | author: _.find(persons, { name: 'James Cameron' })._id, 24 | actors: [ 25 | _.find(persons, { name: 'Sam Worthington' })._id, 26 | _.find(persons, { name: 'Zoe Saldana' })._id, 27 | ], 28 | }, 29 | ]; 30 | 31 | module.exports = { films, persons }; 32 | -------------------------------------------------------------------------------- /test-fixtures/mongo/many-nulls-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | const _ = require('lodash'); 3 | 4 | const persons = [ 5 | { 6 | _id: ObjectID(), 7 | name: 'James Cameron', 8 | }, 9 | ]; 10 | 11 | const films = []; 12 | 13 | for (let i = 0; i < 50; i += 1) { 14 | films.push({ 15 | _id: ObjectID(), 16 | title: `Terminator #${i}`, 17 | author: null, 18 | }); 19 | } 20 | 21 | films.push({ 22 | _id: ObjectID(), 23 | title: 'Terminator', 24 | author: _.find(persons, { name: 'James Cameron' })._id, 25 | }); 26 | 27 | for (let i = 0; i < 50; i += 1) { 28 | films.push({ 29 | _id: ObjectID(), 30 | title: `Terminator 2 #${i}`, 31 | }); 32 | } 33 | 34 | 
module.exports = { films, persons }; 35 | -------------------------------------------------------------------------------- /test-fixtures/mongo/many-objectid-fields-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | const _ = require('lodash'); 3 | 4 | const persons = [ 5 | { 6 | _id: ObjectID(), 7 | name: 'James Cameron', 8 | }, 9 | { 10 | _id: ObjectID(), 11 | name: 'Big Cameron', 12 | }, 13 | { 14 | _id: ObjectID(), 15 | name: 'Brad Pitt', 16 | }, 17 | ]; 18 | 19 | const films = [ 20 | { 21 | _id: ObjectID(), 22 | title: 'Terminator', 23 | author: _.find(persons, { name: 'James Cameron' })._id, 24 | }, 25 | { 26 | _id: ObjectID(), 27 | title: 'Fight Club', 28 | bestActor: _.find(persons, { name: 'Brad Pitt' })._id, 29 | }, 30 | ]; 31 | 32 | persons[0].dad = persons[1]._id; 33 | persons[1].son = persons[0]._id; 34 | persons[2].cousin = persons[0]._id; 35 | persons[2].cousin = persons[1]._id; 36 | persons[2].preferredFilm = films[1]._id; 37 | persons[1].preferredFilm = null; 38 | 39 | module.exports = { films, persons }; 40 | -------------------------------------------------------------------------------- /test-fixtures/mongo/multiple-nested-array-of-objects-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | 3 | const persons = [ 4 | { 5 | _id: ObjectID(), 6 | name: 'James Cameron', 7 | propArrayOfObjects: [ 8 | { 9 | _id: ObjectID(), 10 | one: 'one', 11 | two: 'two', 12 | }, 13 | { 14 | _id: ObjectID(), 15 | one: '1', 16 | two: '2', 17 | }, 18 | ], 19 | }, 20 | { 21 | _id: ObjectID(), 22 | name: 'James Cameron', 23 | propArrayOfObjects: [ 24 | { 25 | _id: ObjectID(), 26 | one: 1, 27 | two: 'two', 28 | }, 29 | { 30 | _id: ObjectID(), 31 | one: '1', 32 | two: '2', 33 | }, 34 | ], 35 | }, 36 | ]; 37 | 38 | module.exports = { persons }; 39 | -------------------------------------------------------------------------------- /test-fixtures/mongo/multiple-references-same-field-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | const _ = require('lodash'); 3 | 4 | const persons = [ 5 | { 6 | _id: ObjectID(), 7 | name: 'James Cameron', 8 | }, 9 | ]; 10 | 11 | const actors = [ 12 | { 13 | _id: ObjectID(), 14 | name: 'Jim Carrey', 15 | }, 16 | ]; 17 | 18 | const films = [ 19 | { 20 | _id: ObjectID(), 21 | title: 'Terminator', 22 | author: _.find(persons, { name: 'James Cameron' })._id, 23 | refersTo: 'persons', 24 | }, 25 | { 26 | _id: ObjectID(), 27 | title: 'The mask', 28 | author: _.find(actors, { name: 'Jim Carrey' })._id, 29 | refersTo: 'actors', 30 | }, 31 | ]; 32 | 33 | module.exports = { films, persons, actors }; 34 | -------------------------------------------------------------------------------- /test-fixtures/mongo/nested-array-of-numbers-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | 3 | const persons = [ 4 | { 5 | _id: ObjectID(), 6 | name: 'James Cameron', 7 | propArrayOfNumbers: [1, 2, 3], 8 | }, 9 | ]; 10 | 11 | module.exports = { persons }; 12 | -------------------------------------------------------------------------------- /test-fixtures/mongo/nested-array-of-objects-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | 3 | const persons = [ 4 
| { 5 | _id: ObjectID(), 6 | name: 'James Cameron', 7 | propArrayOfObjects: [ 8 | { 9 | _id: ObjectID(), 10 | one: 'one', 11 | two: 'two', 12 | }, 13 | { 14 | _id: ObjectID(), 15 | one: '1', 16 | two: '2', 17 | }, 18 | ], 19 | }, 20 | ]; 21 | 22 | module.exports = { persons }; 23 | -------------------------------------------------------------------------------- /test-fixtures/mongo/nested-object-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | 3 | const persons = [ 4 | { 5 | _id: ObjectID(), 6 | name: 'James Cameron', 7 | propGroup: { 8 | answer: false, 9 | date: new Date(), 10 | sentence: 'Life is beautiful', 11 | number: 1664, 12 | }, 13 | }, 14 | ]; 15 | 16 | module.exports = { persons }; 17 | -------------------------------------------------------------------------------- /test-fixtures/mongo/simple-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | const _ = require('lodash'); 3 | 4 | const persons = [ 5 | { 6 | _id: ObjectID(), 7 | name: 'James Cameron', 8 | }, 9 | ]; 10 | 11 | const films = [ 12 | { 13 | _id: ObjectID(), 14 | title: 'Terminator', 15 | author: _.find(persons, { name: 'James Cameron' })._id, 16 | }, 17 | ]; 18 | 19 | module.exports = { films, persons }; 20 | -------------------------------------------------------------------------------- /test-fixtures/mongo/sub-document-not-using-ids-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | 3 | const persons = [ 4 | { 5 | _id: ObjectID(), 6 | name: 'James Cameron', 7 | propArrayOfObjects: [ 8 | { 9 | sampleValue: 'sample', 10 | 'complex name': 'sample', 11 | }, 12 | ], 13 | }, 14 | ]; 15 | 16 | module.exports = { persons }; 17 | -------------------------------------------------------------------------------- /test-fixtures/mongo/sub-document-using-ids-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | 3 | const persons = [ 4 | { 5 | _id: ObjectID(), 6 | name: 'James Cameron', 7 | propArrayOfObjects: [ 8 | { 9 | _id: ObjectID(), 10 | sampleValue: 'sample value', 11 | 'complex name': 'sample', 12 | }, 13 | ], 14 | }, 15 | ]; 16 | 17 | module.exports = { persons }; 18 | -------------------------------------------------------------------------------- /test-fixtures/mongo/sub-documents-ambiguous-ids-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | 3 | const persons = [ 4 | { 5 | _id: ObjectID(), 6 | name: 'James Cameron', 7 | propArrayOfObjects: [ 8 | { 9 | sampleValue: 'sample', 10 | }, 11 | { 12 | _id: ObjectID(), 13 | sampleValue: 'sample value', 14 | 'complex name': 'sample', 15 | }, 16 | ], 17 | }, 18 | ]; 19 | 20 | module.exports = { persons }; 21 | -------------------------------------------------------------------------------- /test-fixtures/mongo/sub-documents-not-using-ids-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | 3 | const persons = [ 4 | { 5 | _id: ObjectID(), 6 | name: 'James Cameron', 7 | propArrayOfObjects: [ 8 | { 9 | sampleValue: 'sample', 10 | 'complex name': 'sample', 11 | }, 12 | { 13 | sampleValue: 'sample', 14 | }, 15 | ], 16 | }, 17 | ]; 18 | 19 | module.exports = { 
persons }; 20 | -------------------------------------------------------------------------------- /test-fixtures/mongo/sub-documents-using-ids-model.js: -------------------------------------------------------------------------------- 1 | const { ObjectID } = require('mongodb'); 2 | 3 | const persons = [ 4 | { 5 | _id: ObjectID(), 6 | name: 'James Cameron', 7 | propArrayOfObjects: [ 8 | { 9 | _id: ObjectID(), 10 | 'complex name': 'sample', 11 | sampleValue: 'sample', 12 | }, 13 | { 14 | _id: ObjectID(), 15 | sampleValue: 'sample value', 16 | }, 17 | ], 18 | }, 19 | ]; 20 | 21 | module.exports = { persons }; 22 | -------------------------------------------------------------------------------- /test-fixtures/mssql/addresses.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].addresses ( 2 | user_id INT NOT NULL, 3 | street VARCHAR(30) NOT NULL, 4 | city VARCHAR(30) NOT NULL, 5 | state VARCHAR(30) NOT NULL, 6 | CONSTRAINT addresses_pkey PRIMARY KEY (user_id), 7 | CONSTRAINT fk_user_id FOREIGN KEY (user_id) REFERENCES [dbo].users(id), 8 | CONSTRAINT unique_city UNIQUE (city) 9 | ); 10 | -------------------------------------------------------------------------------- /test-fixtures/mssql/books.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].books ( 2 | id INT NOT NULL, 3 | title VARCHAR(100) NOT NULL, 4 | author VARCHAR(100) NOT NULL, 5 | published_date DATETIME NOT NULL, 6 | isbn INT, 7 | CONSTRAINT books_pkey PRIMARY KEY (id), 8 | CONSTRAINT books_isbn_key UNIQUE (isbn ASC) 9 | ); 10 | -------------------------------------------------------------------------------- /test-fixtures/mssql/cars.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE cars ( 2 | id INT NOT NULL, 3 | model VARCHAR(25) NOT NULL, 4 | CONSTRAINT pk_cars PRIMARY KEY (id) 5 | ); 6 | -------------------------------------------------------------------------------- /test-fixtures/mssql/customers.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].customers ( 2 | id BIGINT IDENTITY(1,1) NOT NULL, 3 | name VARCHAR(255) NOT NULL, 4 | description TEXT, 5 | is_active BIT DEFAULT 1 NOT NULL, 6 | paying BIT DEFAULT 0 NOT NULL, 7 | created_at DATETIME NOT NULL, 8 | updated_at DATETIME NOT NULL, 9 | CONSTRAINT pk_customers PRIMARY KEY (id) 10 | ); 11 | -------------------------------------------------------------------------------- /test-fixtures/mssql/default_values.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE default_values ( 2 | id INT NOT NULL, 3 | bool_null BIT DEFAULT NULL, 4 | bool_cst BIT DEFAULT 1, 5 | int_cst INTEGER DEFAULT 42, 6 | str_null VARCHAR(25) DEFAULT NULL, 7 | str_cst VARCHAR(25) DEFAULT 'co''nst''ant', 8 | str_expr VARCHAR(25) DEFAULT UPPER(CONCAT('Hello', 'World')), 9 | date_null DATETIME DEFAULT NULL, 10 | date_cst1 DATETIME DEFAULT '2015-05-11 13:01:01', 11 | date_cst2 DATETIME DEFAULT '1983-05-27', 12 | date_expr1 DATETIME DEFAULT getutcdate(), 13 | date_expr2 DATETIME DEFAULT CURRENT_TIMESTAMP, 14 | CONSTRAINT pk_def PRIMARY KEY(id) 15 | ); -------------------------------------------------------------------------------- /test-fixtures/mssql/doubleref.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS doubleref; 2 | 3 | CREATE TABLE doubleref ( 4 | car_no INT NOT NULL, 5 | 
no_car INT NOT NULL, 6 | CONSTRAINT doubleref_car_id_fkey FOREIGN KEY (car_no) REFERENCES [dbo].cars(id), 7 | CONSTRAINT doubleref_id_car_fkey FOREIGN KEY (no_car) REFERENCES [dbo].cars(id), 8 | CONSTRAINT doubleref_pkey PRIMARY KEY (car_no) 9 | ); 10 | 11 | ALTER TABLE doubleref 12 | ADD CONSTRAINT same_car_no_fk 13 | FOREIGN KEY (car_no) 14 | REFERENCES [dbo].cars(id); 15 | -------------------------------------------------------------------------------- /test-fixtures/mssql/duplicatedalias.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS roles; 2 | DROP TABLE IF EXISTS joinroles; 3 | DROP TABLE IF EXISTS project; 4 | 5 | CREATE TABLE project ( 6 | id INT PRIMARY KEY 7 | ); 8 | 9 | CREATE TABLE joinroles ( 10 | id INT PRIMARY KEY, 11 | project INT, 12 | project_id INT, 13 | CONSTRAINT joinroles_project_fk FOREIGN KEY (project) REFERENCES [dbo].project(id) ON DELETE CASCADE, 14 | CONSTRAINT joinroles_project_id_fk FOREIGN KEY (project_id) REFERENCES [dbo].project(id) 15 | ); 16 | 17 | CREATE TABLE roles ( 18 | id INT PRIMARY KEY, 19 | name VARCHAR(25), 20 | project INT, 21 | project_id INT, 22 | CONSTRAINT roles_project_fk FOREIGN KEY (project) REFERENCES [dbo].project(id) ON DELETE CASCADE, 23 | CONSTRAINT roles_project_id_fk FOREIGN KEY (project_id) REFERENCES [dbo].project(id) 24 | ); 25 | -------------------------------------------------------------------------------- /test-fixtures/mssql/only_foreign_keys_and_id.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE only_foreign_keys_and_id ( 2 | id INT PRIMARY KEY, 3 | sample_id INT, 4 | car_id INT, 5 | CONSTRAINT fk_sample_id FOREIGN KEY (sample_id) REFERENCES sample_table (id), 6 | CONSTRAINT fk_car_id FOREIGN KEY (car_id) REFERENCES cars (id) 7 | ); 8 | 9 | -------------------------------------------------------------------------------- /test-fixtures/mssql/owners.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].owners ( 2 | id INT NOT NULL, 3 | name VARCHAR(25), 4 | owner_id INT NOT NULL, 5 | CONSTRAINT owners_pkey PRIMARY KEY (id), 6 | CONSTRAINT owner_owner_id_uindex UNIQUE (owner_id) 7 | ); 8 | -------------------------------------------------------------------------------- /test-fixtures/mssql/parenthesis_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].parenthesis_table ( 2 | id INT NOT NULL, 3 | [Ingredients (Kcal/100g)] VARCHAR(100) , 4 | PRIMARY KEY (id) 5 | ); 6 | -------------------------------------------------------------------------------- /test-fixtures/mssql/parenthesis_underscored_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].parenthesis_underscored_table ( 2 | id INT NOT NULL, 3 | [Ingredients (Kcal/100g)] VARCHAR(100), 4 | ingredient_weight INT NOT NULL, 5 | PRIMARY KEY (id) 6 | ); 7 | -------------------------------------------------------------------------------- /test-fixtures/mssql/projects.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].projects ( 2 | id INT NOT NULL, 3 | name VARCHAR(25), 4 | owner_id INT NOT NULL, 5 | CONSTRAINT projects_pkey PRIMARY KEY (id), 6 | CONSTRAINT fk_owner_id FOREIGN KEY (owner_id) REFERENCES [dbo].owners (owner_id) 7 | ); 8 | -------------------------------------------------------------------------------- 
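The MSSQL fixtures above are plain SQL scripts, so a test run only needs to read one and execute it against a disposable database before invoking the analyzer. The helper below is a sketch under that assumption (hypothetical, not this repository's actual test-utils code).

const fs = require('fs');
const path = require('path');

// Sketch only: load one fixture file and run it as a single raw query.
// Assumes the target dialect accepts the whole script in one batch.
async function loadFixture(sequelize, fixtureName) {
  const sql = fs.readFileSync(path.join(__dirname, fixtureName), 'utf8');
  await sequelize.query(sql);
}

module.exports = loadFixture;

--------------------------------------------------------------------------------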
/test-fixtures/mssql/rentals.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE rentals ( 2 | car_no INT NOT NULL, 3 | from_date DATE NOT NULL, 4 | to_date DATE NOT NULL, 5 | CONSTRAINT rentals_pkey PRIMARY KEY (car_no), 6 | CONSTRAINT rentals_car_id_fkey FOREIGN KEY (car_no) REFERENCES [dbo].cars(id) 7 | ); 8 | -------------------------------------------------------------------------------- /test-fixtures/mssql/reviews.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].reviews ( 2 | id INT NOT NULL, 3 | book_id INT NOT NULL, 4 | user_id INT NOT NULL, 5 | review_content VARCHAR(255), 6 | rating INT, 7 | published_date DATETIME, 8 | CONSTRAINT reviews_pkey PRIMARY KEY (id), 9 | CONSTRAINT reviews_book_id_fkey FOREIGN KEY (book_id) REFERENCES [dbo].books(id) ON DELETE CASCADE ON UPDATE NO ACTION, 10 | CONSTRAINT reviews_user_id_fkey FOREIGN KEY (user_id) REFERENCES [dbo].users(id) ON DELETE CASCADE ON UPDATE NO ACTION, 11 | CONSTRAINT published_date_rating_key UNIQUE (published_date, rating) 12 | ); 13 | -------------------------------------------------------------------------------- /test-fixtures/mssql/sample_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].sample_table ( 2 | id INT NOT NULL, 3 | PRIMARY KEY (id) 4 | ); 5 | -------------------------------------------------------------------------------- /test-fixtures/mssql/underscored_no_fields.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].underscored_no_fields ( 2 | id INT NOT NULL, 3 | sample_table_id INT NOT NULL, 4 | PRIMARY KEY (id), 5 | CONSTRAINT underscored_no_fields_sample_table_id_fkey FOREIGN KEY (sample_table_id) REFERENCES [dbo].sample_table(id) 6 | ); 7 | -------------------------------------------------------------------------------- /test-fixtures/mssql/user_books.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].user_books ( 2 | user_id INT NOT NULL, 3 | book_id INT NOT NULL, 4 | creation_date DATETIME, 5 | update_date DATETIME, 6 | CONSTRAINT user_books_book_id_fkey FOREIGN KEY (book_id) REFERENCES [dbo].books(id), 7 | CONSTRAINT user_books_user_id_fkey FOREIGN KEY (user_id) REFERENCES [dbo].users(id) 8 | ); 9 | -------------------------------------------------------------------------------- /test-fixtures/mssql/users.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE [dbo].users ( 2 | id INT NOT NULL, 3 | username VARCHAR(25) NOT NULL, 4 | enabled BIT DEFAULT 1, 5 | last_login DATETIME NOT NULL, 6 | CONSTRAINT users_pkey PRIMARY KEY (id) 7 | ); 8 | -------------------------------------------------------------------------------- /test-fixtures/mysql/addresses.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE addresses ( 2 | user_id INT NOT NULL PRIMARY KEY, 3 | street VARCHAR(30) NOT NULL, 4 | city VARCHAR(30) NOT NULL, 5 | state VARCHAR(30) NOT NULL, 6 | CONSTRAINT fk_user_id FOREIGN KEY (user_id) REFERENCES users (id), 7 | CONSTRAINT unique_city UNIQUE (city) 8 | ); 9 | -------------------------------------------------------------------------------- /test-fixtures/mysql/books.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE books ( 2 | id INT AUTO_INCREMENT NOT NULL
PRIMARY KEY, 3 | title VARCHAR(100) NOT NULL, 4 | author VARCHAR(100) NOT NULL, 5 | published_date DATETIME NOT NULL, 6 | isbn INT, 7 | CONSTRAINT books_isbn_key UNIQUE (isbn ASC) 8 | ); 9 | -------------------------------------------------------------------------------- /test-fixtures/mysql/cars.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE cars ( 2 | id INT AUTO_INCREMENT PRIMARY KEY, 3 | model VARCHAR(25) NOT NULL 4 | ); 5 | -------------------------------------------------------------------------------- /test-fixtures/mysql/customers.sql: -------------------------------------------------------------------------------- 1 | create table customers ( 2 | id BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY, 3 | name VARCHAR(255) NOT NULL, 4 | description TEXT, 5 | is_active BIT(1) DEFAULT 1 NOT NULL, 6 | paying BIT(1) DEFAULT 0 NOT NULL, 7 | created_at DATETIME NOT NULL, 8 | updated_at DATETIME NOT NULL 9 | ); 10 | -------------------------------------------------------------------------------- /test-fixtures/mysql/default_values.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE default_values ( 2 | id SERIAL, 3 | bool_null BIT(1) DEFAULT NULL, 4 | bool_cst BIT(1) DEFAULT TRUE, 5 | int_cst INTEGER DEFAULT 42, 6 | str_null VARCHAR(25) DEFAULT NULL, 7 | str_cst VARCHAR(25) DEFAULT 'co''nst''ant', 8 | str_expr VARCHAR(25) DEFAULT (UPPER(CONCAT('Hello', 'World'))), 9 | date_null TIMESTAMP DEFAULT NULL, 10 | date_cst1 TIMESTAMP DEFAULT '2015-05-11 13:01:01', 11 | date_cst2 DATE DEFAULT '1983-05-27', 12 | date_expr1 TIMESTAMP DEFAULT NOW(), 13 | date_expr2 TIMESTAMP DEFAULT CURRENT_TIMESTAMP, 14 | enum_cst1 ENUM('a', 'b', 'c') DEFAULT NULL, 15 | enum_cst2 ENUM('a', 'b', 'c') DEFAULT 'a', 16 | json_cst JSON DEFAULT ('{"a":1,"b":2}'), 17 | PRIMARY KEY(id) 18 | ); -------------------------------------------------------------------------------- /test-fixtures/mysql/doubleref.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS doubleref; 2 | 3 | CREATE TABLE doubleref ( 4 | car_no INT NOT NULL, 5 | no_car INT NOT NULL, 6 | FOREIGN KEY (car_no) REFERENCES cars(id), 7 | FOREIGN KEY (no_car) REFERENCES cars(id), 8 | PRIMARY KEY (car_no) 9 | ); 10 | 11 | ALTER TABLE doubleref 12 | ADD CONSTRAINT same_car_no_fk 13 | FOREIGN KEY (car_no) 14 | REFERENCES cars(id); 15 | -------------------------------------------------------------------------------- /test-fixtures/mysql/duplicatedalias.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS roles; 2 | DROP TABLE IF EXISTS joinroles; 3 | DROP TABLE IF EXISTS project; 4 | 5 | CREATE TABLE project ( 6 | id INT PRIMARY KEY 7 | ); 8 | 9 | CREATE TABLE joinroles ( 10 | id INT PRIMARY KEY, 11 | project INT, 12 | project_id INT, 13 | FOREIGN KEY (project) REFERENCES project(id) ON DELETE CASCADE, 14 | FOREIGN KEY (project_id) REFERENCES project(id) ON DELETE CASCADE 15 | ); 16 | 17 | CREATE TABLE roles ( 18 | id INT PRIMARY KEY, 19 | name VARCHAR(25), 20 | project INT, 21 | project_id INT, 22 | FOREIGN KEY (project) REFERENCES project(id) ON DELETE CASCADE, 23 | FOREIGN KEY (project_id) REFERENCES project(id) ON DELETE CASCADE 24 | ); 25 | -------------------------------------------------------------------------------- /test-fixtures/mysql/json.sql: -------------------------------------------------------------------------------- 1 | create table json ( 
2 | id INT NOT NULL PRIMARY KEY, 3 | object JSON 4 | ); 5 | -------------------------------------------------------------------------------- /test-fixtures/mysql/only_foreign_keys_and_id.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE only_foreign_keys_and_id ( 2 | id INT AUTO_INCREMENT PRIMARY KEY, 3 | sample_id INT, 4 | car_id INT, 5 | FOREIGN KEY (sample_id) REFERENCES sample_table (id), 6 | FOREIGN KEY (car_id) REFERENCES cars (id) 7 | ); 8 | -------------------------------------------------------------------------------- /test-fixtures/mysql/owners.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE owners ( 2 | id INT AUTO_INCREMENT PRIMARY KEY, 3 | name VARCHAR(25), 4 | owner_id INT NOT NULL, 5 | CONSTRAINT owner_owner_id_uindex UNIQUE (owner_id) 6 | ); 7 | -------------------------------------------------------------------------------- /test-fixtures/mysql/parenthesis_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE parenthesis_table ( 2 | id INT NOT NULL, 3 | `Ingredients (Kcal/100g)` VARCHAR(100), 4 | PRIMARY KEY (id) 5 | ); 6 | -------------------------------------------------------------------------------- /test-fixtures/mysql/parenthesis_underscored_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE parenthesis_underscored_table ( 2 | id INT NOT NULL, 3 | `Ingredients (Kcal/100g)` VARCHAR(100), 4 | ingredient_weight INT NOT NULL, 5 | PRIMARY KEY (id) 6 | ); 7 | -------------------------------------------------------------------------------- /test-fixtures/mysql/projects.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE projects ( 2 | id INT AUTO_INCREMENT PRIMARY KEY, 3 | name VARCHAR(25), 4 | owner_id INT NOT NULL, 5 | CONSTRAINT fk_owner_id FOREIGN KEY (owner_id) REFERENCES owners (owner_id) 6 | ); 7 | -------------------------------------------------------------------------------- /test-fixtures/mysql/rentals.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE rentals ( 2 | car_no INT NOT NULL, 3 | from_date DATE NOT NULL, 4 | to_date DATE NOT NULL, 5 | PRIMARY KEY (car_no), 6 | FOREIGN KEY (car_no) REFERENCES cars (id) 7 | ); 8 | -------------------------------------------------------------------------------- /test-fixtures/mysql/reviews.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE reviews ( 2 | id INT AUTO_INCREMENT NOT NULL PRIMARY KEY, 3 | book_id INT NOT NULL, 4 | user_id INT NOT NULL, 5 | review_content VARCHAR(255), 6 | rating INT, 7 | published_date DATETIME, 8 | CONSTRAINT reviews_book_id_fkey FOREIGN KEY (book_id) REFERENCES books(id) ON DELETE CASCADE ON UPDATE NO ACTION, 9 | CONSTRAINT reviews_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE ON UPDATE NO ACTION, 10 | CONSTRAINT published_date_rating_key UNIQUE (published_date, rating) 11 | ); 12 | -------------------------------------------------------------------------------- /test-fixtures/mysql/sample_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE sample_table ( 2 | id INT NOT NULL, 3 | PRIMARY KEY (id) 4 | ); 5 | -------------------------------------------------------------------------------- /test-fixtures/mysql/underscored_no_fields.sql: 
-------------------------------------------------------------------------------- 1 | CREATE TABLE underscored_no_fields ( 2 | id INT NOT NULL, 3 | sample_table_id INT NOT NULL, 4 | PRIMARY KEY (id), 5 | CONSTRAINT underscored_no_fields_sample_table_id_fkey FOREIGN KEY (sample_table_id) REFERENCES sample_table(id) ON DELETE NO ACTION ON UPDATE CASCADE 6 | ); 7 | -------------------------------------------------------------------------------- /test-fixtures/mysql/user_books.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE user_books ( 2 | user_id INT NOT NULL, 3 | book_id INT NOT NULL, 4 | creation_date DATETIME, 5 | update_date DATETIME, 6 | CONSTRAINT user_books_book_id_fkey FOREIGN KEY (book_id) REFERENCES books(id) ON DELETE NO ACTION ON UPDATE CASCADE, 7 | CONSTRAINT user_books_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE NO ACTION ON UPDATE CASCADE 8 | ); 9 | -------------------------------------------------------------------------------- /test-fixtures/mysql/users.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE users ( 2 | id INT AUTO_INCREMENT PRIMARY KEY, 3 | username VARCHAR(25) NOT NULL, 4 | enabled BIT(1) DEFAULT 1, 5 | last_login DATETIME NOT NULL 6 | ); 7 | -------------------------------------------------------------------------------- /test-fixtures/postgres/addresses.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE addresses ( 2 | user_id INT NOT NULL, 3 | street VARCHAR(30) NOT NULL, 4 | city VARCHAR(30) NOT NULL CONSTRAINT unique_city UNIQUE, 5 | state VARCHAR(30) NOT NULL, 6 | PRIMARY KEY (user_id), 7 | CONSTRAINT fk_user_id FOREIGN KEY (user_id) REFERENCES users (id) 8 | ); 9 | -------------------------------------------------------------------------------- /test-fixtures/postgres/books.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE books ( 2 | id SERIAL, 3 | title VARCHAR(100) NOT NULL, 4 | author VARCHAR(100) NOT NULL, 5 | published_date TIMESTAMP NOT NULL, 6 | isbn INT, 7 | PRIMARY KEY (id), 8 | UNIQUE (isbn) 9 | ); 10 | -------------------------------------------------------------------------------- /test-fixtures/postgres/cars.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE cars ( 2 | id SERIAL, 3 | model VARCHAR(25) NOT NULL, 4 | PRIMARY KEY(id) 5 | ); 6 | -------------------------------------------------------------------------------- /test-fixtures/postgres/customers.sql: -------------------------------------------------------------------------------- 1 | create table customers ( 2 | id BIGSERIAL CONSTRAINT customers_pk PRIMARY KEY, 3 | name VARCHAR NOT NULL, 4 | description TEXT, 5 | is_active BOOLEAN DEFAULT true NOT NULL, 6 | paying BOOLEAN DEFAULT false NOT NULL, 7 | created_at TIMESTAMP NOT NULL, 8 | updated_at TIMESTAMP NOT NULL 9 | ); 10 | -------------------------------------------------------------------------------- /test-fixtures/postgres/default_values.sql: -------------------------------------------------------------------------------- 1 | DROP TYPE IF EXISTS default_values_enum; 2 | 3 | CREATE TYPE default_values_enum AS ENUM ('a', 'b', 'c'); 4 | 5 | CREATE TABLE default_values ( 6 | id SERIAL, 7 | bool_null BOOLEAN DEFAULT NULL, 8 | bool_cst BOOLEAN DEFAULT TRUE, 9 | int_cst INTEGER DEFAULT 42, 10 | str_null VARCHAR(25) DEFAULT NULL, 11 | str_cst VARCHAR(25) 
DEFAULT 'co''nst''ant', 12 | str_expr VARCHAR(25) DEFAULT UPPER('Hello' || 'World'), 13 | date_null TIMESTAMP DEFAULT NULL, 14 | date_cst1 TIMESTAMP DEFAULT '2010-01-01T00:00:00Z', 15 | date_cst2 TIMESTAMP DEFAULT '1983-05-27', 16 | date_expr1 TIMESTAMP DEFAULT CURRENT_TIMESTAMP, 17 | date_expr2 TIMESTAMP DEFAULT now(), 18 | date_expr3 TIMESTAMP DEFAULT timezone('utc', now()), 19 | enum_cst1 default_values_enum DEFAULT NULL, 20 | enum_cst2 default_values_enum DEFAULT 'a', 21 | array_cst1 INTEGER [] DEFAULT '{25000,25000,27000,27000}', 22 | array_cst2 INTEGER [] DEFAULT ARRAY [25000,25000,27000,27000], 23 | json_cst JSON DEFAULT '{"a":1,"b":2}' :: json, 24 | jsonb_cst JSONB DEFAULT '{"a":1,"b":2}' :: jsonb, 25 | PRIMARY KEY(id) 26 | ); -------------------------------------------------------------------------------- /test-fixtures/postgres/doubleref.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS doubleref; 2 | 3 | CREATE TABLE doubleref ( 4 | car_no INT NOT NULL, 5 | no_car INT NOT NULL, 6 | FOREIGN KEY (car_no) REFERENCES cars(id), 7 | FOREIGN KEY (no_car) REFERENCES cars(id), 8 | PRIMARY KEY (car_no) 9 | ); 10 | 11 | ALTER TABLE doubleref 12 | ADD CONSTRAINT same_car_no_fk 13 | FOREIGN KEY (car_no) 14 | REFERENCES cars(id); 15 | -------------------------------------------------------------------------------- /test-fixtures/postgres/duplicatedalias.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS roles; 2 | DROP TABLE IF EXISTS joinroles; 3 | DROP TABLE IF EXISTS project; 4 | 5 | CREATE TABLE project ( 6 | id INT PRIMARY KEY 7 | ); 8 | 9 | CREATE TABLE joinroles ( 10 | id INT PRIMARY KEY, 11 | project INT, 12 | project_id INT, 13 | FOREIGN KEY (project) REFERENCES project(id) ON DELETE CASCADE, 14 | FOREIGN KEY (project_id) REFERENCES project(id) ON DELETE CASCADE 15 | ); 16 | 17 | CREATE TABLE roles ( 18 | id INT PRIMARY KEY, 19 | name VARCHAR, 20 | project INT, 21 | project_id INT, 22 | FOREIGN KEY (project) REFERENCES project(id) ON DELETE CASCADE, 23 | FOREIGN KEY (project_id) REFERENCES project(id) ON DELETE CASCADE 24 | ); 25 | -------------------------------------------------------------------------------- /test-fixtures/postgres/employees.sql: -------------------------------------------------------------------------------- 1 | create table employees ( 2 | name varchar, 3 | pay_by_quarter integer ARRAY 4 | ); 5 | -------------------------------------------------------------------------------- /test-fixtures/postgres/json.sql: -------------------------------------------------------------------------------- 1 | create table json ( 2 | id SERIAL, 3 | object JSON 4 | ); 5 | -------------------------------------------------------------------------------- /test-fixtures/postgres/only_foreign_keys_and_id.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE only_foreign_keys_and_id ( 2 | id SERIAL PRIMARY KEY, 3 | sample_id INT, 4 | car_id INT, 5 | FOREIGN KEY (sample_id) REFERENCES sample_table(id), 6 | FOREIGN KEY (car_id) REFERENCES cars(id) 7 | ); 8 | -------------------------------------------------------------------------------- /test-fixtures/postgres/owners.sql: -------------------------------------------------------------------------------- 1 | create table owners ( 2 | id serial primary key, 3 | name varchar, 4 | owner_id integer not null 5 | ); 6 | 7 | create unique index owners_owner_id_uindex on owners 
(owner_id); 8 | 9 | -------------------------------------------------------------------------------- /test-fixtures/postgres/parenthesis_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE parenthesis_table ( 2 | id INT NOT NULL, 3 | "Ingredients (Kcal/100g)" VARCHAR, 4 | PRIMARY KEY (id) 5 | ); 6 | -------------------------------------------------------------------------------- /test-fixtures/postgres/parenthesis_underscored_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE parenthesis_underscored_table ( 2 | id INT NOT NULL, 3 | "Ingredients (Kcal/100g)" VARCHAR, 4 | ingredient_weight INT NOT NULL, 5 | PRIMARY KEY (id) 6 | ); 7 | -------------------------------------------------------------------------------- /test-fixtures/postgres/projects.sql: -------------------------------------------------------------------------------- 1 | create table projects ( 2 | id serial primary key, 3 | name varchar, 4 | owner_id integer not null constraint owner_project_pk references owners (owner_id) 5 | ); 6 | 7 | 8 | -------------------------------------------------------------------------------- /test-fixtures/postgres/rentals.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE rentals ( 2 | car_no INT NOT NULL, 3 | from_date DATE NOT NULL, 4 | to_date DATE NOT NULL, 5 | FOREIGN KEY (car_no) REFERENCES cars(id), 6 | PRIMARY KEY (car_no) 7 | ); 8 | -------------------------------------------------------------------------------- /test-fixtures/postgres/reviews.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE reviews ( 2 | id SERIAL, 3 | book_id INT NOT NULL, 4 | user_id INT NOT NULL, 5 | review_content VARCHAR(255), 6 | rating INT, 7 | published_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP, 8 | PRIMARY KEY (id), 9 | FOREIGN KEY (book_id) REFERENCES books(id) ON DELETE CASCADE, 10 | FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE, 11 | CONSTRAINT published_date_rating_key UNIQUE (published_date, rating) 12 | ); 13 | -------------------------------------------------------------------------------- /test-fixtures/postgres/sample_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE sample_table ( 2 | id INT NOT NULL, 3 | PRIMARY KEY (id) 4 | ); 5 | -------------------------------------------------------------------------------- /test-fixtures/postgres/underscored_no_fields.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE underscored_no_fields ( 2 | id INT NOT NULL, 3 | sample_table_id INT NOT NULL, 4 | PRIMARY KEY (id), 5 | FOREIGN KEY (sample_table_id) REFERENCES sample_table(id) ON UPDATE CASCADE 6 | ); 7 | -------------------------------------------------------------------------------- /test-fixtures/postgres/user_books.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE user_books ( 2 | user_id INT NOT NULL, 3 | book_id INT NOT NULL, 4 | creation_date TIMESTAMP, 5 | update_date TIMESTAMP, 6 | FOREIGN KEY (user_id) REFERENCES users(id) ON UPDATE CASCADE, 7 | FOREIGN KEY (book_id) REFERENCES books(id) ON UPDATE CASCADE 8 | ); 9 | -------------------------------------------------------------------------------- /test-fixtures/postgres/users.sql: -------------------------------------------------------------------------------- 
1 | CREATE TABLE users ( 2 | id SERIAL, 3 | username VARCHAR(25) NOT NULL, 4 | enabled BOOLEAN DEFAULT TRUE, 5 | last_login TIMESTAMP NOT NULL, 6 | PRIMARY KEY (id) 7 | ); 8 | -------------------------------------------------------------------------------- /test-utils/database-urls.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | DATABASE_URL_MONGODB_MIN: 'mongodb://localhost:27015', 3 | DATABASE_URL_MONGODB_MAX: 'mongodb://localhost:27016', 4 | DATABASE_URL_MSSQL_MIN: 'mssql://sa:forest2019:@localhost:1431/model', 5 | DATABASE_URL_MSSQL_MAX: 'mssql://sa:forest2019:@localhost:1432/model', 6 | DATABASE_URL_MYSQL_MIN: 'mysql://forest:secret@localhost:8998/lumber-sequelize-test', 7 | DATABASE_URL_MYSQL_MAX: 'mysql://forest:secret@localhost:8999/lumber-sequelize-test', 8 | DATABASE_URL_POSTGRESQL_MIN: 'postgres://forest:secret@localhost:54368/lumber-sequelize-test', 9 | DATABASE_URL_POSTGRESQL_MAX: 'postgres://forest:secret@localhost:54369/lumber-sequelize-test', 10 | }; 11 | -------------------------------------------------------------------------------- /test-utils/mongo-helper.js: -------------------------------------------------------------------------------- 1 | const { MongoClient } = require('mongodb'); 2 | const assert = require('assert'); 3 | 4 | const dbName = 'forest-test'; 5 | 6 | class MongoHelper { 7 | constructor(url) { 8 | this.url = url; 9 | } 10 | 11 | connect() { 12 | this.client = new MongoClient(this.url, { useNewUrlParser: true, useUnifiedTopology: true }); 13 | return new Promise((resolve) => { 14 | this.client.connect((err) => { 15 | assert.equal(null, err); 16 | this.db = this.client.db(dbName); 17 | resolve(this.db); 18 | }); 19 | }); 20 | } 21 | 22 | given(fixtures) { 23 | return Promise.all(Object.keys(fixtures).map((collectionName) => 24 | this.insertDocs(collectionName, fixtures[collectionName]))); 25 | } 26 | 27 | insertDocs(collectionName, docs) { 28 | return this.db 29 | .collection(collectionName) 30 | .insertMany(docs, { ordered: false }); 31 | } 32 | 33 | close() { 34 | this.db = null; 35 | return this.client.close(); 36 | } 37 | 38 | async dropAllCollections() { 39 | const collections = await this.db.listCollections().toArray(); 40 | return Promise.all(collections 41 | // System collections are not droppable… 42 | .filter(({ name }) => !name.startsWith('system.')) 43 | // …other collections are. 
44 | .map(({ name }) => this.db.collection(name).drop())); 45 | } 46 | } 47 | 48 | module.exports = MongoHelper; 49 | -------------------------------------------------------------------------------- /test-utils/multiple-database-version-helper.js: -------------------------------------------------------------------------------- 1 | const { 2 | DATABASE_URL_MONGODB_MIN, 3 | DATABASE_URL_MONGODB_MAX, 4 | DATABASE_URL_MSSQL_MAX, 5 | DATABASE_URL_MSSQL_MIN, 6 | DATABASE_URL_MYSQL_MAX, 7 | DATABASE_URL_MYSQL_MIN, 8 | DATABASE_URL_POSTGRESQL_MAX, 9 | DATABASE_URL_POSTGRESQL_MIN, 10 | } = require('./database-urls'); 11 | 12 | const mongoDatabases = [{ 13 | version: '3.2', 14 | url: DATABASE_URL_MONGODB_MIN, 15 | }, { 16 | version: '4.2', 17 | url: DATABASE_URL_MONGODB_MAX, 18 | }]; 19 | 20 | const sqlDatabases = [{ 21 | dialect: 'mysql', 22 | version: '5.6', 23 | connectionUrl: DATABASE_URL_MYSQL_MIN, 24 | schema: 'public', 25 | }, { 26 | dialect: 'mysql', 27 | version: '8.0', 28 | connectionUrl: DATABASE_URL_MYSQL_MAX, 29 | schema: 'public', 30 | }, { 31 | dialect: 'postgres', 32 | version: '9.4', 33 | connectionUrl: DATABASE_URL_POSTGRESQL_MIN, 34 | schema: 'public', 35 | }, { 36 | dialect: 'postgres', 37 | version: '12.1', 38 | connectionUrl: DATABASE_URL_POSTGRESQL_MAX, 39 | schema: 'public', 40 | }, { 41 | dialect: 'mssql', 42 | version: '2017-CU8-ubuntu', 43 | connectionUrl: DATABASE_URL_MSSQL_MIN, 44 | schema: 'dbo', 45 | }, { 46 | dialect: 'mssql', 47 | version: '2019-GDR1-ubuntu-16.04', 48 | connectionUrl: DATABASE_URL_MSSQL_MAX, 49 | schema: 'dbo', 50 | }]; 51 | 52 | module.exports = { 53 | describeMongoDatabases(tests) { 54 | mongoDatabases.forEach((mongoDatabase) => { 55 | // eslint-disable-next-line jest/valid-describe 56 | describe(`using Mongo Database v${mongoDatabase.version}`, tests(mongoDatabase.url)); 57 | }); 58 | }, 59 | describeSequelizeDatabases(tests) { 60 | sqlDatabases.forEach((sqlDatabase) => { 61 | // eslint-disable-next-line jest/valid-describe 62 | describe(`using ${sqlDatabase.dialect} Database v${sqlDatabase.version}`, tests(sqlDatabase)); 63 | }); 64 | }, 65 | }; 66 | -------------------------------------------------------------------------------- /test/context/init.test.js: -------------------------------------------------------------------------------- 1 | const initContext = require('../../context/init'); 2 | const ApplicationContext = require('../../context/application-context'); 3 | 4 | describe('context > init', () => { 5 | it('should not throw error with an empty context', () => { 6 | expect.assertions(1); 7 | 8 | expect(() => initContext(new ApplicationContext())).not.toThrow(); 9 | }); 10 | }); 11 | -------------------------------------------------------------------------------- /test/deserializers/application-token.unit.test.js: -------------------------------------------------------------------------------- 1 | const applicationTokenDeserializer = require('../../deserializers/application-token'); 2 | 3 | describe('deserializers > ApplicationToken', () => { 4 | it('should deserialize the name and token of an application token', async () => { 5 | expect.assertions(1); 6 | 7 | const deserialized = await applicationTokenDeserializer.deserialize({ 8 | data: { 9 | id: '42', 10 | type: 'application-tokens', 11 | attributes: { 12 | name: 'the token', 13 | token: 'ABC', 14 | }, 15 | }, 16 | }); 17 | 18 | expect(deserialized).toStrictEqual({ 19 | id: '42', 20 | name: 'the token', 21 | token: 'ABC', 22 | }); 23 | }); 24 | }); 25 | 
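The two test utilities shown above are designed to be used together: describeMongoDatabases() from test-utils/multiple-database-version-helper.js generates one Jest describe block per supported MongoDB version, while MongoHelper from test-utils/mongo-helper.js manages the connection and the fixtures inside it. Below is a minimal, hypothetical sketch of such an integration test; the relative require paths and the `films` fixture are illustrative assumptions, not code taken from the repository.

const MongoHelper = require('../../test-utils/mongo-helper');
const { describeMongoDatabases } = require('../../test-utils/multiple-database-version-helper');

// One describe block is generated per MongoDB version listed in the helper.
describeMongoDatabases((url) => () => {
  let mongoHelper;

  beforeAll(async () => {
    // Open a dedicated connection to the database version under test.
    mongoHelper = new MongoHelper(url);
    await mongoHelper.connect();
  });

  // Start every test from an empty database.
  beforeEach(() => mongoHelper.dropAllCollections());

  afterAll(() => mongoHelper.close());

  it('loads one collection of fixtures', async () => {
    expect.assertions(1);

    // `given` inserts one array of documents per collection name
    // and resolves with one insert result per collection.
    const results = await mongoHelper.given({ films: [{ title: 'Alien' }] });

    expect(results).toHaveLength(1);
  });
});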
-------------------------------------------------------------------------------- /test/handlebars/helpers/wrap-quotes.unit.test.js: -------------------------------------------------------------------------------- 1 | const wrapQuotes = require('../../../handlerbars/helpers/wrap-quotes'); 2 | 3 | describe('wrap-quotes', () => { 4 | it('should wrap', () => { 5 | expect.assertions(8); 6 | expect(wrapQuotes('notSpecialName')).toBe('\'notSpecialName\''); 7 | expect(wrapQuotes('notSpecialName12')).toBe('\'notSpecialName12\''); 8 | expect(wrapQuotes('_notSpecialName12')).toBe('\'_notSpecialName12\''); 9 | 10 | expect(wrapQuotes('special Name')).toBe('\'special Name\''); 11 | expect(wrapQuotes(' Name')).toBe('\' Name\''); 12 | expect(wrapQuotes('special-name')).toBe('\'special-name\''); 13 | expect(wrapQuotes('@specialname')).toBe('\'@specialname\''); 14 | expect(wrapQuotes('1234')).toBe('\'1234\''); 15 | }); 16 | }); 17 | -------------------------------------------------------------------------------- /test/serializers/application-token.unit.test.js: -------------------------------------------------------------------------------- 1 | const applicationTokenSerializer = require('../../serializers/application-token'); 2 | 3 | describe('serializers > ApplicationToken', () => { 4 | it('should serialize the name of an application token', () => { 5 | expect.assertions(1); 6 | 7 | const serialized = applicationTokenSerializer.serialize({ name: 'the token' }); 8 | 9 | expect(serialized).toStrictEqual({ 10 | data: { 11 | type: 'application-tokens', 12 | attributes: { 13 | name: 'the token', 14 | }, 15 | }, 16 | }); 17 | }); 18 | }); 19 | -------------------------------------------------------------------------------- /test/services/analyzer/mongo-collections-analyzer.test.js: -------------------------------------------------------------------------------- 1 | const analyzeMongoCollections = require('../../../services/analyzer/mongo-collections-analyzer'); 2 | const EmptyDatabaseError = require('../../../utils/errors/database/empty-database-error'); 3 | 4 | describe('services > mongoCollectionsAnalyzer', () => { 5 | describe('analyzeMongoCollections', () => { 6 | it('should return an EmptyDatabase error if connection doesn\'t have collections', async () => { 7 | expect.assertions(1); 8 | 9 | const databaseConnectionMock = { 10 | collections: jest.fn().mockResolvedValue([]), 11 | }; 12 | 13 | const error = new EmptyDatabaseError('no collections found'); 14 | 15 | await expect(analyzeMongoCollections(databaseConnectionMock)).rejects.toThrow(error); 16 | }); 17 | }); 18 | }); 19 | -------------------------------------------------------------------------------- /test/services/analyzer/sequelize-tables-analyzer.test.js: -------------------------------------------------------------------------------- 1 | const analyzeSequelizeTables = require('../../../services/analyzer/sequelize-tables-analyzer'); 2 | const EmptyDatabaseError = require('../../../utils/errors/database/empty-database-error'); 3 | 4 | describe('services > sequelizeTablesAnalyzer', () => { 5 | describe('analyzeSequelizeTables', () => { 6 | it('should return an EmptyDatabase error if connection doesn\'t have tables', async () => { 7 | expect.assertions(1); 8 | 9 | const databaseConnectionMock = { 10 | QueryTypes: {}, 11 | query: jest.fn().mockReturnValue([]), 12 | getQueryInterface: jest.fn().mockReturnValue({ 13 | showAllTables: jest.fn().mockResolvedValue([]), 14 | }), 15 | getDialect: jest.fn().mockReturnValue('mysql'), 16 | }; 17 | 18 | const 
error = new EmptyDatabaseError('no tables found'); 19 | 20 | await expect(analyzeSequelizeTables(databaseConnectionMock, {})).rejects.toThrow(error); 21 | }); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /test/services/command-generate-config-getter.test.js: -------------------------------------------------------------------------------- 1 | const CommandGenerateConfigGetter = require('../../services/command-generate-config-getter'); 2 | 3 | describe('services > command generate config getter', () => { 4 | describe('with a command with a "connectionUrl" option', () => { 5 | it('should require [dbConnectionUrl, dbSchema, ssl, mongodbSrv, appName, appHostname, appPort]', () => { 6 | expect.assertions(1); 7 | const commandGenerateConfigGetter = new CommandGenerateConfigGetter({ connectionUrl: 'postgres://forest:secret@localhost:5435/forest' }); 8 | expect(commandGenerateConfigGetter.getOptions()).toStrictEqual([ 9 | 'dbConnectionUrl', 10 | 'dbSchema', 11 | 'ssl', 12 | 'mongodbSrv', 13 | 'appName', 14 | 'appHostname', 15 | 'appPort', 16 | 'email', 17 | ]); 18 | }); 19 | }); 20 | 21 | describe('with a command with no options', () => { 22 | it('should require [dbDialect, dbName, dbHostname, dbPort, dbUser, dbPassword, dbSchema, email, ssl, mongodbSrv, appName, appHostname, appPort]', () => { 23 | expect.assertions(1); 24 | const commandGenerateConfigGetter = new CommandGenerateConfigGetter({ db: true }); 25 | expect(commandGenerateConfigGetter.getOptions()).toStrictEqual([ 26 | 'dbDialect', 27 | 'dbName', 28 | 'dbHostname', 29 | 'dbPort', 30 | 'dbUser', 31 | 'dbPassword', 32 | 'dbSchema', 33 | 'ssl', 34 | 'mongodbSrv', 35 | 'appName', 36 | 'appHostname', 37 | 'appPort', 38 | 'email', 39 | ]); 40 | }); 41 | }); 42 | }); 43 | -------------------------------------------------------------------------------- /test/services/error-handler.unit.test.js: -------------------------------------------------------------------------------- 1 | const LumberError = require('../../utils/lumber-error'); 2 | const ErrorHandler = require('../../services/error-handler'); 3 | 4 | describe('service > Oidc > ErrorHandler', () => { 5 | function setupTest() { 6 | const context = { 7 | terminator: { 8 | terminate: jest.fn(), 9 | }, 10 | chalk: { 11 | red: jest.fn().mockImplementation((value) => `${value}`), 12 | }, 13 | messages: { 14 | ERROR_UNEXPECTED: 'Unexpected', 15 | }, 16 | }; 17 | const errorHandler = new ErrorHandler(context); 18 | 19 | return { 20 | ...context, 21 | errorHandler, 22 | }; 23 | } 24 | describe('handle', () => { 25 | describe('when the error is unknown', () => { 26 | it('should display a message indicating that the error is unknown', () => { 27 | expect.assertions(1); 28 | 29 | const { errorHandler, terminator } = setupTest(); 30 | 31 | errorHandler.handle(new Error('The error')); 32 | 33 | expect(terminator.terminate).toHaveBeenCalledWith( 34 | 1, 35 | { logs: ['Unexpected The error'] }, 36 | ); 37 | }); 38 | }); 39 | 40 | describe('when the error is known', () => { 41 | it('should output the reason if provided', () => { 42 | expect.assertions(1); 43 | 44 | const { errorHandler, terminator } = setupTest(); 45 | 46 | errorHandler.handle(new LumberError('The error', undefined, { reason: 'The inner error' })); 47 | 48 | expect(terminator.terminate).toHaveBeenCalledWith( 49 | 1, 50 | { logs: ['The error: The inner error'] }, 51 | ); 52 | }); 53 | 54 | it('should output the possible solution if provided', () => { 55 | expect.assertions(1); 56 | 
57 | const { errorHandler, terminator } = setupTest(); 58 | 59 | errorHandler.handle(new LumberError('The error', undefined, { possibleSolution: 'possible solution' })); 60 | 61 | expect(terminator.terminate).toHaveBeenCalledWith( 62 | 1, 63 | { logs: ['The error', 'possible solution'] }, 64 | ); 65 | }); 66 | }); 67 | }); 68 | }); 69 | -------------------------------------------------------------------------------- /test/services/mysql-table-constraints-getter.test.js: -------------------------------------------------------------------------------- 1 | const MysqlTableConstraintsGetter = require('../../services/analyzer/mysql-table-constraints-getter'); 2 | 3 | const databaseConnectionMock = { 4 | getQueryInterface: () => { 5 | }, 6 | }; 7 | 8 | describe('services > mysql table constraints getter', () => { 9 | describe('with no unique index', () => { 10 | it('should provide an empty unique constraint array', async () => { 11 | expect.assertions(1); 12 | const fixture = [ 13 | { columnType: 'PRIMARY_KEY', constraintName: 'PRIMARY' }, 14 | { columnType: 'FOREIGN_KEY', constraintName: 'test_fkey' }, 15 | { columnType: 'FOREIGN_KEY', constraintName: 'test_fkey' }, 16 | ]; 17 | const constraintGetter = new MysqlTableConstraintsGetter(databaseConnectionMock); 18 | const actual = constraintGetter.convertToUniqueIndexArray(fixture); 19 | 20 | expect(actual).toBeNull(); 21 | }); 22 | }); 23 | describe('with two simple unique indexes', () => { 24 | it('should provide an unique constraint array', async () => { 25 | expect.assertions(1); 26 | const fixture = [ 27 | { columnType: 'PRIMARY_KEY', constraintName: 'PRIMARY' }, 28 | { columnType: 'UNIQUE', constraintName: 'one_unique_index', columnName: 'one' }, 29 | { columnType: 'FOREIGN_KEY', constraintName: 'test_fkey' }, 30 | { columnType: 'UNIQUE', constraintName: 'anotherOne_unique_index', columnName: 'anotherOne' }, 31 | ]; 32 | const constraintGetter = new MysqlTableConstraintsGetter(databaseConnectionMock); 33 | const actual = constraintGetter.convertToUniqueIndexArray(fixture); 34 | const expected = [['one'], ['anotherOne']]; 35 | 36 | expect(actual).toStrictEqual(expected); 37 | }); 38 | }); 39 | describe('with two unique indexes', () => { 40 | it('should provide an unique constraint array', async () => { 41 | expect.assertions(1); 42 | const fixture = [ 43 | { columnType: 'PRIMARY_KEY', constraintName: 'PRIMARY' }, 44 | { columnType: 'UNIQUE', constraintName: 'left_unique_index', columnName: 'left' }, 45 | { columnType: 'FOREIGN_KEY', constraintName: 'test_fkey' }, 46 | { columnType: 'UNIQUE', constraintName: 'right_up_unique_index', columnName: 'right' }, 47 | { columnType: 'UNIQUE', constraintName: 'right_up_unique_index', columnName: 'up' }, 48 | ]; 49 | const constraintGetter = new MysqlTableConstraintsGetter(databaseConnectionMock); 50 | const actual = constraintGetter.convertToUniqueIndexArray(fixture); 51 | const expected = [['left'], ['right', 'up']]; 52 | 53 | expect(actual).toStrictEqual(expected); 54 | }); 55 | }); 56 | }); 57 | -------------------------------------------------------------------------------- /test/services/prompter/general-prompter.test.js: -------------------------------------------------------------------------------- 1 | const sinon = require('sinon'); 2 | const GeneralPrompter = require('../../../services/prompter/general-prompter'); 3 | const PrompterError = require('../../../services/prompter/prompter-error'); 4 | const Terminator = require('../../../utils/terminator'); 5 | 6 | describe('services > prompter > 
general prompter', () => { 7 | let requests = []; 8 | let program = {}; 9 | 10 | function resetParams() { 11 | requests = []; 12 | program = {}; 13 | } 14 | 15 | describe('getting the config from prompts', () => { 16 | describe('when a PromptError is thrown', () => { 17 | it('should terminate the process', async () => { 18 | expect.assertions(5); 19 | const promptError = new PrompterError('error message', ['logs']); 20 | 21 | const generalPrompter = new GeneralPrompter(requests, program); 22 | const userPromptsStub = sinon.stub(generalPrompter.userPrompt, 'handlePrompts').rejects(promptError); 23 | const applicationPromptsStub = sinon.stub(generalPrompter.applicationPrompt, 'handlePrompts').rejects(promptError); 24 | const projectPromptsStub = sinon.stub(generalPrompter.projectPrompt, 'handlePrompts').rejects(promptError); 25 | const databasePromptsStub = sinon.stub(generalPrompter.databasePrompt, 'handlePrompts').rejects(promptError); 26 | const terminateStub = sinon.stub(Terminator, 'terminate').resolves(true); 27 | 28 | await generalPrompter.getConfig(); 29 | 30 | const status = terminateStub.getCall(0).args[0]; 31 | const { 32 | errorCode, 33 | errorMessage, 34 | logs, 35 | context, 36 | } = terminateStub.getCall(0).args[1]; 37 | 38 | expect(status).toStrictEqual(1); 39 | expect(errorCode).toStrictEqual('unexpected_error'); 40 | expect(errorMessage).toStrictEqual('error message'); 41 | expect(logs).toStrictEqual(['logs']); 42 | expect(context).toBeUndefined(); 43 | 44 | resetParams(); 45 | userPromptsStub.restore(); 46 | applicationPromptsStub.restore(); 47 | projectPromptsStub.restore(); 48 | databasePromptsStub.restore(); 49 | terminateStub.restore(); 50 | }); 51 | }); 52 | }); 53 | }); 54 | -------------------------------------------------------------------------------- /test/services/prompter/prompt-utils.test.js: -------------------------------------------------------------------------------- 1 | const AbstractPrompt = require('../../../services/prompter/abstract-prompter'); 2 | 3 | describe('services > prompter > prompt utils', () => { 4 | const promptUtils = new AbstractPrompt(['requestedOption']); 5 | 6 | describe('when checking if an option is requested', () => { 7 | it('should return true if the option is present in the requests', () => { 8 | expect.assertions(1); 9 | expect(promptUtils.isOptionRequested('requestedOption')).toStrictEqual(true); 10 | }); 11 | 12 | it('should return false if the option is not present in the requests', () => { 13 | expect.assertions(1); 14 | expect(promptUtils.isOptionRequested('notRequestedOption')).toStrictEqual(false); 15 | }); 16 | }); 17 | }); 18 | -------------------------------------------------------------------------------- /test/templates/app/server.test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | 3 | describe('templates > app/server.hbs', () => { 4 | it('should use the PORT before the APPLICATION_PORT to be sure that the Heroku deployment works well', () => { 5 | expect.assertions(1); 6 | const template = fs.readFileSync('templates/app/server.hbs').toString(); 7 | expect(template).toStrictEqual(expect.stringMatching("process.env.PORT || process.env.APPLICATION_PORT || '3310'")); 8 | }); 9 | }); 10 | -------------------------------------------------------------------------------- /test/utils/errors.test.js: -------------------------------------------------------------------------------- 1 | const LumberError = require('../../utils/lumber-error'); 2 | const 
EmptyDatabaseError = require('../../utils/errors/database/empty-database-error'); 3 | 4 | describe('utils > errors', () => { 5 | describe('lumberError', () => { 6 | it('should be an instance of Error', () => { 7 | expect.assertions(1); 8 | 9 | const error = new LumberError(); 10 | 11 | expect(error).toBeInstanceOf(Error); 12 | }); 13 | 14 | it('should handle the details of an error', () => { 15 | expect.assertions(1); 16 | 17 | const error = new LumberError('an error', 'a detail'); 18 | 19 | expect(error.details).toStrictEqual('a detail'); 20 | }); 21 | }); 22 | 23 | describe('databaseAnalyzerErrors', () => { 24 | it('emptyDatabase should be of type LumberError', () => { 25 | expect.assertions(1); 26 | 27 | const error = new EmptyDatabaseError(); 28 | 29 | expect(error).toBeInstanceOf(LumberError); 30 | }); 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /test/utils/fields.test.js: -------------------------------------------------------------------------------- 1 | const { isUnderscored } = require('../../utils/fields'); 2 | 3 | describe('utils > fields', () => { 4 | describe('without wrong parameters', () => { 5 | it('should return false', () => { 6 | expect.assertions(2); 7 | 8 | expect(isUnderscored(undefined)).toStrictEqual(false); 9 | expect(isUnderscored([])).toStrictEqual(false); 10 | }); 11 | }); 12 | 13 | describe('with only one field named `id`', () => { 14 | it('should return true', () => { 15 | expect.assertions(1); 16 | 17 | const fields = [{ 18 | nameColumn: 'id', 19 | }]; 20 | 21 | expect(isUnderscored(fields)).toStrictEqual(true); 22 | }); 23 | }); 24 | 25 | describe('with multiple fields', () => { 26 | describe('with underscored fields', () => { 27 | it('should return true', () => { 28 | expect.assertions(1); 29 | 30 | const fields = [{ 31 | nameColumn: 'id', 32 | }, { 33 | nameColumn: 'first_name', 34 | }]; 35 | 36 | expect(isUnderscored(fields)).toStrictEqual(true); 37 | }); 38 | }); 39 | 40 | describe('without underscored fields', () => { 41 | it('should return false', () => { 42 | expect.assertions(1); 43 | 44 | const fields = [{ 45 | nameColumn: 'id', 46 | }, { 47 | nameColumn: 'firstName', 48 | }]; 49 | 50 | expect(isUnderscored(fields)).toStrictEqual(false); 51 | }); 52 | }); 53 | }); 54 | }); 55 | -------------------------------------------------------------------------------- /test/utils/mongo-primitive-type.test.js: -------------------------------------------------------------------------------- 1 | const { ObjectId } = require('mongodb'); 2 | const { getMongooseTypeFromValue, isOfMongooseType } = require('../../utils/mongo-primitive-type'); 3 | 4 | describe('utils > Mongo Primitive Type', () => { 5 | describe('get primitive type from value', () => { 6 | it('should return `String`', () => { 7 | expect.assertions(1); 8 | expect(getMongooseTypeFromValue('string')).toStrictEqual('String'); 9 | }); 10 | 11 | it('should return `Number`', () => { 12 | expect.assertions(1); 13 | expect(getMongooseTypeFromValue(1)).toStrictEqual('Number'); 14 | }); 15 | 16 | it('should return `Boolean`', () => { 17 | expect.assertions(2); 18 | expect(getMongooseTypeFromValue(true)).toStrictEqual('Boolean'); 19 | expect(getMongooseTypeFromValue(false)).toStrictEqual('Boolean'); 20 | }); 21 | 22 | it('should return `Date`', () => { 23 | expect.assertions(1); 24 | expect(getMongooseTypeFromValue(new Date())).toStrictEqual('Date'); 25 | }); 26 | 27 | it('should return `Mongoose.Schema.Types.ObjectId`', () => { 28 | expect.assertions(1); 
29 | expect(getMongooseTypeFromValue(new ObjectId('objectIdFake'))).toStrictEqual('Mongoose.Schema.Types.ObjectId'); 30 | }); 31 | 32 | it('should return null', () => { 33 | expect.assertions(4); 34 | expect(getMongooseTypeFromValue(null)).toBeNull(); 35 | expect(getMongooseTypeFromValue(undefined)).toBeNull(); 36 | expect(getMongooseTypeFromValue([])).toBeNull(); 37 | expect(getMongooseTypeFromValue({})).toBeNull(); 38 | }); 39 | }); 40 | 41 | describe('checking if value is has a primitive type', () => { 42 | it('should return true', () => { 43 | expect.assertions(5); 44 | expect(isOfMongooseType('string')).toStrictEqual(true); 45 | expect(isOfMongooseType(1)).toStrictEqual(true); 46 | expect(isOfMongooseType(true)).toStrictEqual(true); 47 | expect(isOfMongooseType(new Date())).toStrictEqual(true); 48 | expect(isOfMongooseType(new ObjectId('objectIdFake'))).toStrictEqual(true); 49 | }); 50 | 51 | it('should return false', () => { 52 | expect.assertions(4); 53 | expect(isOfMongooseType(undefined)).toStrictEqual(false); 54 | expect(isOfMongooseType(null)).toStrictEqual(false); 55 | expect(isOfMongooseType([])).toStrictEqual(false); 56 | expect(isOfMongooseType({})).toStrictEqual(false); 57 | }); 58 | }); 59 | }); 60 | -------------------------------------------------------------------------------- /test/utils/to-valid-package-name.test.js: -------------------------------------------------------------------------------- 1 | const toValidPackageName = require('../../utils/to-valid-package-name'); 2 | 3 | describe('utils > toValidPackageName', () => { 4 | it('should not convert valid package names', () => { 5 | expect.assertions(7); 6 | 7 | const names = [ 8 | 'some-package', 9 | 'example.com', 10 | 'under_score', 11 | '123numeric', 12 | '@npm/thingy', 13 | '@jane/foo.js', 14 | '-', 15 | ]; 16 | 17 | names.forEach((name) => { 18 | expect(toValidPackageName(name)).toStrictEqual(name); 19 | }); 20 | }); 21 | 22 | it('should convert invalid package names to valid package names', () => { 23 | expect.assertions(6); 24 | 25 | const names = [ 26 | { original: 'with space', expected: 'with-space' }, 27 | { original: ' with many space ', expected: 'with-many-space' }, 28 | { original: 'SHOULD BE LOWER CASE', expected: 'should-be-lower-case' }, 29 | { original: '--a¨*£%¨*+/.?:=›Îfl---z-', expected: 'a-z' }, 30 | { original: '∆™Ÿª', expected: 'lumber-project' }, 31 | { original: '', expected: 'lumber-project' }, 32 | ]; 33 | 34 | names.forEach((name) => { 35 | expect(toValidPackageName(name.original)).toStrictEqual(name.expected); 36 | }); 37 | }); 38 | }); 39 | -------------------------------------------------------------------------------- /utils/authenticator-helper.js: -------------------------------------------------------------------------------- 1 | const atob = require('atob'); 2 | const logger = require('../services/logger'); 3 | 4 | function parseJwt(token) { 5 | if (!token || !token.includes('.')) { return null; } 6 | try { 7 | const base64Url = token.split('.')[1]; 8 | const base64 = base64Url.replace(/-/g, '+').replace(/_/g, '/'); 9 | const jsonPayload = decodeURIComponent(atob(base64) 10 | .split('') 11 | .map((c) => `%${`00${c.charCodeAt(0).toString(16)}`.slice(-2)}`) 12 | .join('')); 13 | 14 | return JSON.parse(jsonPayload); 15 | } catch (error) { 16 | logger.error('Your session token is invalid.'); 17 | return null; 18 | } 19 | } 20 | 21 | module.exports = { 22 | parseJwt, 23 | }; 24 | -------------------------------------------------------------------------------- 
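parseJwt() in utils/authenticator-helper.js above only base64-decodes the payload segment of a session JWT; it performs no signature verification and returns null for missing or malformed tokens. As a minimal sketch of how it could be used, the helper below reads the standard `exp` claim to decide whether a session token is stale; the function name and the require path are illustrative assumptions, not part of the repository.

const { parseJwt } = require('./utils/authenticator-helper');

// A JWT is three dot-separated base64url segments: header.payload.signature.
// parseJwt decodes the middle segment and returns it as a plain object.
function isSessionTokenExpired(sessionToken) {
  const payload = parseJwt(sessionToken);

  // parseJwt returns null when the token is absent or cannot be decoded.
  if (!payload || !payload.exp) return true;

  // The `exp` claim is a UNIX timestamp expressed in seconds.
  return payload.exp * 1000 < Date.now();
}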
/utils/errors/application-token/unable-to-create-application-token-error.js: -------------------------------------------------------------------------------- 1 | const LumberError = require('../../lumber-error'); 2 | 3 | class UnableToCreateApplicationTokenError extends LumberError { 4 | /** 5 | * @param {{ 6 | * reason?: string; 7 | * possibleSolution?: string 8 | * }} [options] 9 | */ 10 | constructor(options) { 11 | super('Unable to create an application token on Forest Admin', undefined, options); 12 | this.name = 'UnableToCreateApplicationTokenError'; 13 | } 14 | } 15 | 16 | module.exports = UnableToCreateApplicationTokenError; 17 | -------------------------------------------------------------------------------- /utils/errors/database/empty-database-error.js: -------------------------------------------------------------------------------- 1 | const LumberError = require('../../lumber-error'); 2 | 3 | class EmptyDatabaseError extends LumberError {} 4 | 5 | module.exports = EmptyDatabaseError; 6 | -------------------------------------------------------------------------------- /utils/errors/dumper/incompatible-liana-for-update-error.js: -------------------------------------------------------------------------------- 1 | const LumberError = require('../../lumber-error'); 2 | 3 | class IncompatibleLianaForUpdateError extends LumberError { 4 | /** 5 | * @param {string} [reason] 6 | */ 7 | constructor(reason) { 8 | super('The liana is incompatible for update', undefined, { reason }); 9 | this.name = 'IncompatibleLianaForUpdateError'; 10 | } 11 | } 12 | 13 | module.exports = IncompatibleLianaForUpdateError; 14 | -------------------------------------------------------------------------------- /utils/errors/dumper/invalid-lumber-project-structure-error.js: -------------------------------------------------------------------------------- 1 | const LumberError = require('../../lumber-error'); 2 | 3 | class InvalidLumberProjectStructureError extends LumberError { 4 | /** 5 | * @param {string} path 6 | * @param {string} [reason] 7 | */ 8 | constructor(path, reason) { 9 | super(`We are not able to detect a lumber project file architecture at this path: ${path}.`, undefined, { reason }); 10 | this.name = 'InvalidLumberProjectStructureError'; 11 | } 12 | } 13 | 14 | module.exports = InvalidLumberProjectStructureError; 15 | -------------------------------------------------------------------------------- /utils/fields.js: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | 3 | function isUnderscored(fields) { 4 | if (!fields || !fields.length) return false; 5 | 6 | if (fields.length === 1 && fields[0].nameColumn === 'id') return true; 7 | 8 | return fields.every((field) => field.nameColumn === _.snakeCase(field.nameColumn)) 9 | && fields.some((field) => field.nameColumn.includes('_')); 10 | } 11 | 12 | module.exports = { 13 | isUnderscored, 14 | }; 15 | -------------------------------------------------------------------------------- /utils/lumber-error.js: -------------------------------------------------------------------------------- 1 | class LumberError extends Error { 2 | /** 3 | * @param {string} message 4 | * @param {any} [details] 5 | * @param {{ 6 | * reason?: string; 7 | * possibleSolution?: string; 8 | * }} [options] 9 | */ 10 | constructor(message, details, options) { 11 | super(message); 12 | 13 | /** @public @readonly */ 14 | this.name =
'LumberError'; 15 | 16 | /** @public @readonly */ 17 | this.userMessage = message; 18 | 19 | /** @public @readonly */ 20 | this.details = details; 21 | 22 | /** @public @readonly */ 23 | this.reason = options && options.reason; 24 | 25 | /** @public @readonly */ 26 | this.possibleSolution = options && options.possibleSolution; 27 | 28 | Error.captureStackTrace(this, this.constructor); 29 | } 30 | } 31 | 32 | module.exports = LumberError; 33 | -------------------------------------------------------------------------------- /utils/messages.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | ERROR_UNEXPECTED: 'An unexpected error occurred. Please reach out for help in our Developers community (https://community.forestadmin.com/) or create a Github issue with following error:', 3 | ERROR_MISSING_PROJECT_NAME: 'Missing project name in the command.', 4 | HINT_MISSING_PROJECT_NAME: 'Please specify a project name. Type lumber help for more information.', 5 | ERROR_NOT_PARSABLE_CONNECTION_URL: 'Cannot parse the database dialect. Please, check the syntax of the database connection string.', 6 | HINT_DIRECTORY_ALREADY_EXISTS: 'Please retry with another project name.', 7 | }; 8 | -------------------------------------------------------------------------------- /utils/mongo-collections.js: -------------------------------------------------------------------------------- 1 | const P = require('bluebird'); 2 | 3 | function getCollectionName(collection) { 4 | return collection 5 | && collection.s 6 | && collection.s.namespace 7 | && collection.s.namespace.collection; 8 | } 9 | 10 | function isSystemCollection(collection) { 11 | const collectionName = getCollectionName(collection); 12 | return collectionName && collectionName.startsWith('system.'); 13 | } 14 | 15 | async function findCollectionMatchingSamples(databaseConnection, samples) { 16 | return P.mapSeries(databaseConnection.collections(), async (collection) => { 17 | if (isSystemCollection(collection)) return null; 18 | const count = await collection.countDocuments({ _id: { $in: samples } }); 19 | if (count) { 20 | return collection.s.namespace.collection; 21 | } 22 | return null; 23 | }).then((matches) => matches.filter((match) => match)); 24 | } 25 | 26 | function filterReferenceCollection(referencedCollections) { 27 | return referencedCollections.length === 1 ? 
referencedCollections[0] : null; 28 | } 29 | 30 | module.exports = { 31 | findCollectionMatchingSamples, 32 | isSystemCollection, 33 | filterReferenceCollection, 34 | getCollectionName, 35 | }; 36 | -------------------------------------------------------------------------------- /utils/mongo-primitive-type.js: -------------------------------------------------------------------------------- 1 | const { ObjectId } = require('mongodb'); 2 | 3 | /** 4 | * Retrieves simple mongoose type from value if detectable 5 | * Simple types are 'Date', 'Boolean', 'Number', 'String', 'Mongoose.Schema.Types.ObjectId' 6 | * @param value 7 | * @returns {string|null} 8 | */ 9 | /* istanbul ignore next */ 10 | function getMongooseTypeFromValue(value) { 11 | if (typeof value === 'object' && value instanceof Date) { 12 | return 'Date'; 13 | } 14 | 15 | if (typeof value === 'object' && value instanceof ObjectId) { 16 | return 'Mongoose.Schema.Types.ObjectId'; 17 | } 18 | 19 | switch (typeof value) { 20 | case 'boolean': 21 | return 'Boolean'; 22 | case 'number': 23 | return 'Number'; 24 | case 'string': 25 | return 'String'; 26 | default: 27 | return null; 28 | } 29 | } 30 | 31 | /** 32 | * Checks if the value corresponds to a mongoose type 33 | * @param value 34 | * @returns {boolean} 35 | */ 36 | /* istanbul ignore next */ 37 | function isOfMongooseType(value) { 38 | return !!getMongooseTypeFromValue(value); 39 | } 40 | 41 | module.exports = { 42 | getMongooseTypeFromValue, 43 | isOfMongooseType, 44 | }; 45 | -------------------------------------------------------------------------------- /utils/regexs.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | EMAIL_REGEX: /^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/, 3 | // NOTICE: The forest password should contain at least one digit, one capital letter, 4 | // and one lower case letter. It also accepts special characters except whitespaces.
5 | PASSWORD_REGEX: /^(?=\S*?[A-Z])(?=\S*?[a-z])((?=\S*?[0-9]))\S{8,}$/, 6 | }; 7 | -------------------------------------------------------------------------------- /utils/strings.js: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | 3 | const RESERVED_WORDS = [ 4 | 'abstract', 'await', 'boolean', 'break', 5 | 'byte', 'case', 'catch', 'char', 6 | 'class', 'const', 'continue', 'debugger', 7 | 'default', 'delete', 'do', 'double', 8 | 'else', 'enum', 'export', 'extends', 9 | 'false', 'final', 'finally', 'float', 10 | 'for', 'function', 'goto', 'if', 11 | 'implements', 'import', 'in', 'instanceof', 12 | 'int', 'interface', 'let', 'long', 13 | 'module', 'native', 'new', 'null', 14 | 'package', 'private', 'protected', 'public', 15 | 'return', 'short', 'static', 'super', 16 | 'switch', 'synchronized', 'this', 'throw', 17 | 'throws', 'transient', 'true', 'try', 18 | 'typeof', 'undefined', 'var', 'void', 19 | 'volatile', 'while', 'with', 'yield', 20 | ]; 21 | 22 | module.exports = { 23 | pascalCase(input) { 24 | return _.chain(input).camelCase().upperFirst().value(); 25 | }, 26 | 27 | camelCase(input) { 28 | return _.camelCase(input); 29 | }, 30 | 31 | isReservedWord(input) { 32 | return RESERVED_WORDS.includes(_.toLower(input)); 33 | }, 34 | 35 | transformToSafeString(input) { 36 | if (/^[\d]/g.exec(input)) { 37 | return `model${input}`; 38 | } 39 | // NOTICE: add dash to get proper snake/pascal case 40 | if (this.isReservedWord(input)) { 41 | return `model${_.upperFirst(input)}`; 42 | } 43 | return input; 44 | }, 45 | 46 | transformToCamelCaseSafeString(input) { 47 | return this.camelCase(this.transformToSafeString(input)); 48 | }, 49 | }; 50 | -------------------------------------------------------------------------------- /utils/terminator.js: -------------------------------------------------------------------------------- 1 | const logger = require('../services/logger'); 2 | const eventSender = require('../services/event-sender'); 3 | 4 | /** 5 | * @typedef {{ 6 | * errorCode: string; 7 | * errorMessage: string; 8 | * context: any; 9 | * }} DetailedLog 10 | * 11 | * @typedef {{ 12 | * logs: string[] 13 | * }} MultipleMessages 14 | */ 15 | 16 | module.exports = { 17 | /** 18 | * @param {number} status 19 | * @param {DetailedLog | MultipleMessages | DetailedLog & MultipleMessages} log 20 | */ 21 | async terminate(status, { 22 | errorCode, errorMessage, logs, context, 23 | }) { 24 | if (status !== 0 && logger.spinner) { 25 | logger.spinner.fail(); 26 | } 27 | if (logs.length) { 28 | logger.error(...logs); 29 | } 30 | if (errorCode) { 31 | await eventSender.notifyError(errorCode, errorMessage, context); 32 | } else { 33 | await eventSender.notifyError(); 34 | } 35 | 36 | process.exit(status); 37 | }, 38 | }; 39 | -------------------------------------------------------------------------------- /utils/to-valid-package-name.js: -------------------------------------------------------------------------------- 1 | const validate = require('validate-npm-package-name'); 2 | 3 | module.exports = function toValidPackageName(packageName) { 4 | function isValid(name) { 5 | const { validForNewPackages } = validate(name); 6 | return validForNewPackages; 7 | } 8 | 9 | if (!isValid(packageName)) { 10 | // NOTICE: Create an always valid package name (disallow almost everything) 11 | const validPackageName = packageName.toLowerCase() 12 | // Remove all non "a-z", "0-9", "-" characters with hyphen. 
13 | .replace(/[^a-z0-9\\-]/g, '-') 14 | // Remove hyphen sequence (> 1). 15 | .replace(/-{2,}/g, '-') 16 | // Remove leading and trailing hyphen. 17 | .replace(/^-|-$/g, ''); 18 | 19 | // NOTICE: Return 'lumber-project' if sanitized package name is still not valid. 20 | return isValid(validPackageName) ? validPackageName : 'lumber-project'; 21 | } 22 | 23 | return packageName; 24 | }; 25 | --------------------------------------------------------------------------------
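The NOTICE comments in utils/strings.js and utils/to-valid-package-name.js above describe how lumber sanitizes identifiers: reserved words and names starting with a digit are prefixed with `model`, and invalid npm package names are lower-cased, hyphenated and trimmed, falling back to 'lumber-project'. The short sketch below illustrates the resulting values; the sample inputs and require paths are assumptions made for this example.

const strings = require('./utils/strings');
const toValidPackageName = require('./utils/to-valid-package-name');

// Reserved words and digit-leading names receive a "model" prefix so that
// the generated model names stay valid JavaScript identifiers.
strings.transformToCamelCaseSafeString('class');      // => 'modelClass'
strings.transformToCamelCaseSafeString('123_users');  // => 'model123Users'
strings.transformToCamelCaseSafeString('user_books'); // => 'userBooks'

// Invalid package names are sanitized; names that remain invalid fall back
// to the default 'lumber-project'.
toValidPackageName('My Admin Panel'); // => 'my-admin-panel'
toValidPackageName('∆™Ÿª');           // => 'lumber-project'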