├── .github ├── FUNDING.yml └── workflows │ └── ci.yml ├── .gitignore ├── .npmignore ├── CHANGELOG.md ├── README.md ├── jest.config.js ├── package-lock.json ├── package.json ├── src ├── bin │ ├── cli.ts │ └── utils.ts ├── builders │ ├── Builder.ts │ ├── ModelBuilder.ts │ ├── index.ts │ └── utils.ts ├── config │ ├── IConfig.ts │ └── index.ts ├── connection │ ├── createConnection.ts │ └── index.ts ├── dialects │ ├── AssociationsParser.ts │ ├── Dialect.ts │ ├── DialectMSSQL.ts │ ├── DialectMariaDB.ts │ ├── DialectMySQL.ts │ ├── DialectPostgres.ts │ ├── DialectSQLite.ts │ ├── index.ts │ └── utils.ts ├── index.ts ├── lint │ ├── Linter.ts │ ├── eslintDefaultConfig.ts │ └── index.ts └── tests │ ├── environment.ts │ ├── integration │ ├── ITestMetadata.ts │ ├── TestRunner.ts │ ├── associations.csv │ ├── docker-remove-containers.sh │ ├── mariadb │ │ ├── docker-start-mariadb.sh │ │ ├── geometries.ts │ │ ├── mariadb.test.ts │ │ └── queries.ts │ ├── mssql │ │ ├── docker-start-mssql.sh │ │ ├── mssql.test.ts │ │ └── queries.ts │ ├── mysql │ │ ├── docker-start-mysql.sh │ │ ├── geometries.ts │ │ ├── mysql.test.ts │ │ └── queries.ts │ ├── postgres │ │ ├── docker-start-postgres.sh │ │ ├── postgres.test.ts │ │ └── queries.ts │ ├── sqlite │ │ ├── queries.ts │ │ └── sqlite.test.ts │ └── test-env.sh │ └── unit │ └── builders.test.ts └── tsconfig.json /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [spinlud] -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: [push, workflow_dispatch] 3 | 4 | jobs: 5 | install_build: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: Check out repository code 9 | uses: actions/checkout@v2 10 | 11 | - name: Setup node 12 | uses: actions/setup-node@v2 13 | with: 14 | node-version: '16' 15 | 16 | - name: Cache node_modules 17 | id: 
cache-node-modules 18 | uses: actions/cache@v2 19 | with: 20 | path: node_modules 21 | key: node-modules-${{ hashFiles('package-lock.json') }} 22 | 23 | - name: Install 24 | run: npm install 25 | 26 | - name: Build 27 | run: npm run build 28 | 29 | test_mysql: 30 | runs-on: ubuntu-latest 31 | needs: install_build 32 | steps: 33 | - name: Check out repository code 34 | uses: actions/checkout@v2 35 | 36 | - name: Setup node 37 | uses: actions/setup-node@v2 38 | with: 39 | node-version: '16' 40 | 41 | - name: Restore node_modules 42 | id: cache-node-modules 43 | uses: actions/cache@v2 44 | with: 45 | path: node_modules 46 | key: node-modules-${{ hashFiles('package-lock.json') }} 47 | 48 | - name: Test MySQL 5 49 | run: npm run test-mysql 50 | env: 51 | DOCKER_MYSQL_TAG: 5 52 | 53 | - name: Test MySQL 8 54 | run: npm run test-mysql 55 | env: 56 | DOCKER_MYSQL_TAG: 8 57 | 58 | test_postgres: 59 | runs-on: ubuntu-latest 60 | needs: install_build 61 | steps: 62 | - name: Check out repository code 63 | uses: actions/checkout@v2 64 | 65 | - name: Setup node 66 | uses: actions/setup-node@v2 67 | with: 68 | node-version: '16' 69 | 70 | - name: Restore node_modules 71 | id: cache-node-modules 72 | uses: actions/cache@v2 73 | with: 74 | path: node_modules 75 | key: node-modules-${{ hashFiles('package-lock.json') }} 76 | 77 | - name: Test Postgres 11 78 | run: npm run test-postgres 79 | env: 80 | DOCKER_POSTGRES_TAG: 11 81 | 82 | - name: Test Postgres 14 83 | run: npm run test-postgres 84 | env: 85 | DOCKER_POSTGRES_TAG: 14 86 | 87 | - name: Test Postgres 16 88 | run: npm run test-postgres 89 | env: 90 | DOCKER_POSTGRES_TAG: 16 91 | 92 | test_mariadb: 93 | runs-on: ubuntu-latest 94 | needs: install_build 95 | steps: 96 | - name: Check out repository code 97 | uses: actions/checkout@v2 98 | 99 | - name: Setup node 100 | uses: actions/setup-node@v2 101 | with: 102 | node-version: '16' 103 | 104 | - name: Restore node_modules 105 | id: cache-node-modules 106 | uses: 
actions/cache@v2 107 | with: 108 | path: node_modules 109 | key: node-modules-${{ hashFiles('package-lock.json') }} 110 | 111 | - name: Test MariaDB 10 112 | run: npm run test-mariadb 113 | env: 114 | DOCKER_MARIADB_TAG: 10 115 | 116 | - name: Test MariaDB 11 117 | run: npm run test-mariadb 118 | env: 119 | DOCKER_MARIADB_TAG: 11 120 | 121 | test_mssql: 122 | runs-on: ubuntu-latest 123 | needs: install_build 124 | steps: 125 | - name: Check out repository code 126 | uses: actions/checkout@v2 127 | 128 | - name: Setup node 129 | uses: actions/setup-node@v2 130 | with: 131 | node-version: '16' 132 | 133 | - name: Restore node_modules 134 | id: cache-node-modules 135 | uses: actions/cache@v2 136 | with: 137 | path: node_modules 138 | key: node-modules-${{ hashFiles('package-lock.json') }} 139 | 140 | - name: Test MSSQL 2019 141 | run: npm run test-mssql 142 | env: 143 | DOCKER_MSSQL_TAG: 2019-latest 144 | 145 | - name: Test MSSQL 2022 146 | run: npm run test-mssql 147 | env: 148 | DOCKER_MSSQL_TAG: 2022-latest 149 | 150 | test_sqlite: 151 | runs-on: ubuntu-latest 152 | needs: install_build 153 | steps: 154 | - name: Check out repository code 155 | uses: actions/checkout@v2 156 | 157 | - name: Setup node 158 | uses: actions/setup-node@v2 159 | with: 160 | node-version: '16' 161 | 162 | - name: Restore node_modules 163 | id: cache-node-modules 164 | uses: actions/cache@v2 165 | with: 166 | path: node_modules 167 | key: node-modules-${{ hashFiles('package-lock.json') }} 168 | 169 | - name: Test SQLite 170 | run: npm run test-sqlite 171 | 172 | publish: 173 | runs-on: ubuntu-latest 174 | needs: [test_mysql, test_postgres, test_mariadb, test_mssql, test_sqlite] 175 | steps: 176 | - name: Check out repository code 177 | uses: actions/checkout@v2 178 | 179 | - name: Setup node 180 | uses: actions/setup-node@v2 181 | with: 182 | node-version: '16' 183 | 184 | - name: Restore node_modules 185 | id: cache-node-modules 186 | uses: actions/cache@v2 187 | with: 188 | path: 
node_modules 189 | key: node-modules-${{ hashFiles('package-lock.json') }} 190 | 191 | - name: Build 192 | run: npm run build 193 | 194 | - name: Publish 195 | # if: contains(github.ref, 'release') # Tag commit with 'release' string to publish 196 | run: | 197 | npm set registry "https://registry.npmjs.org" 198 | npm set //registry.npmjs.org/:_authToken ${{ secrets.NPM_PUBLISH_TOKEN }} 199 | npm publish 200 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **/build 2 | **/output 3 | **/tmp 4 | **/*.tgz 5 | /prova.ts 6 | /output-models 7 | /sqlite-db 8 | **/memory 9 | src/tmp/** 10 | ./models/** 11 | src/tests/integration/output-models 12 | 13 | # Created by https://www.gitignore.io/api/node,macos 14 | # Edit at https://www.gitignore.io/?templates=node,macos 15 | 16 | ### Webstorm ### 17 | **/*.iml 18 | **/*.idea 19 | 20 | ### macOS ### 21 | # General 22 | .DS_Store 23 | .AppleDouble 24 | .LSOverride 25 | 26 | # Icon must end with two \r 27 | Icon 28 | 29 | # Thumbnails 30 | ._* 31 | 32 | # Files that might appear in the root of a volume 33 | .DocumentRevisions-V100 34 | .fseventsd 35 | .Spotlight-V100 36 | .TemporaryItems 37 | .Trashes 38 | .VolumeIcon.icns 39 | .com.apple.timemachine.donotpresent 40 | 41 | # Directories potentially created on remote AFP share 42 | .AppleDB 43 | .AppleDesktop 44 | Network Trash Folder 45 | Temporary Items 46 | .apdisk 47 | 48 | ### Node ### 49 | # Logs 50 | logs 51 | *.log 52 | npm-debug.log* 53 | yarn-debug.log* 54 | yarn-error.log* 55 | lerna-debug.log* 56 | 57 | # Diagnostic reports (https://nodejs.org/api/report.html) 58 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 59 | 60 | # Runtime data 61 | pids 62 | *.pid 63 | *.seed 64 | *.pid.lock 65 | 66 | # Directory for instrumented libs generated by jscoverage/JSCover 67 | lib-cov 68 | 69 | # Coverage directory used by tools like istanbul 70 | coverage 71 | 
*.lcov 72 | 73 | # nyc test coverage 74 | .nyc_output 75 | 76 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 77 | .grunt 78 | 79 | # Bower dependency directory (https://bower.io/) 80 | bower_components 81 | 82 | # node-waf configuration 83 | .lock-wscript 84 | 85 | # Compiled binary addons (https://nodejs.org/api/addons.html) 86 | build/Release 87 | 88 | # Dependency directories 89 | node_modules/ 90 | jspm_packages/ 91 | 92 | # TypeScript v1 declaration files 93 | typings/ 94 | 95 | # TypeScript cache 96 | *.tsbuildinfo 97 | 98 | # Optional npm cache directory 99 | .npm 100 | 101 | # Optional eslint cache 102 | .eslintcache 103 | 104 | # Optional REPL history 105 | .node_repl_history 106 | 107 | # Output of 'npm pack' 108 | *.tgz 109 | 110 | # Yarn Integrity file 111 | .yarn-integrity 112 | 113 | # dotenv environment variables file 114 | .env 115 | .env.test 116 | 117 | # parcel-bundler cache (https://parceljs.org/) 118 | .cache 119 | 120 | # next.js build output 121 | .next 122 | 123 | # nuxt.js build output 124 | .nuxt 125 | 126 | # rollup.js default build output 127 | dist/ 128 | 129 | # Uncomment the public line if your project uses Gatsby 130 | # https://nextjs.org/blog/next-9-1#public-directory-support 131 | # https://create-react-app.dev/docs/using-the-public-folder/#docsNav 132 | # public 133 | 134 | # Storybook build outputs 135 | .out 136 | .storybook-out 137 | 138 | # vuepress build output 139 | .vuepress/dist 140 | 141 | # Serverless directories 142 | .serverless/ 143 | 144 | # FuseBox cache 145 | .fusebox/ 146 | 147 | # DynamoDB Local files 148 | .dynamodb/ 149 | 150 | # Temporary folders 151 | tmp/ 152 | temp/ 153 | 154 | # End of https://www.gitignore.io/api/node,macos 155 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | **/build/tests 2 | **/src 3 | **/output 4 | **/*.tgz 5 | 
**/prova.ts 6 | **/output-models 7 | **/sqlite-db 8 | **/memory 9 | **/tmp 10 | /jest.config.js 11 | 12 | # Created by https://www.gitignore.io/api/node,macos 13 | # Edit at https://www.gitignore.io/?templates=node,macos 14 | 15 | ### Webstorm ### 16 | **/*.iml 17 | **/*.idea 18 | 19 | ### macOS ### 20 | # General 21 | .DS_Store 22 | .AppleDouble 23 | .LSOverride 24 | 25 | # Icon must end with two \r 26 | Icon 27 | 28 | # Thumbnails 29 | ._* 30 | 31 | # Files that might appear in the root of a volume 32 | .DocumentRevisions-V100 33 | .fseventsd 34 | .Spotlight-V100 35 | .TemporaryItems 36 | .Trashes 37 | .VolumeIcon.icns 38 | .com.apple.timemachine.donotpresent 39 | 40 | # Directories potentially created on remote AFP share 41 | .AppleDB 42 | .AppleDesktop 43 | Network Trash Folder 44 | Temporary Items 45 | .apdisk 46 | 47 | ### Node ### 48 | # Logs 49 | logs 50 | *.log 51 | npm-debug.log* 52 | yarn-debug.log* 53 | yarn-error.log* 54 | lerna-debug.log* 55 | 56 | # Diagnostic reports (https://nodejs.org/api/report.html) 57 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 58 | 59 | # Runtime data 60 | pids 61 | *.pid 62 | *.seed 63 | *.pid.lock 64 | 65 | # Directory for instrumented libs generated by jscoverage/JSCover 66 | lib-cov 67 | 68 | # Coverage directory used by tools like istanbul 69 | coverage 70 | *.lcov 71 | 72 | # nyc test coverage 73 | .nyc_output 74 | 75 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 76 | .grunt 77 | 78 | # Bower dependency directory (https://bower.io/) 79 | bower_components 80 | 81 | # node-waf configuration 82 | .lock-wscript 83 | 84 | # Compiled binary addons (https://nodejs.org/api/addons.html) 85 | build/Release 86 | 87 | # Dependency directories 88 | node_modules/ 89 | jspm_packages/ 90 | 91 | # TypeScript v1 declaration files 92 | typings/ 93 | 94 | # TypeScript cache 95 | *.tsbuildinfo 96 | 97 | # Optional npm cache directory 98 | .npm 99 | 100 | # Optional eslint cache 101 | .eslintcache 102 
| 103 | # Optional REPL history 104 | .node_repl_history 105 | 106 | # Output of 'npm pack' 107 | *.tgz 108 | 109 | # Yarn Integrity file 110 | .yarn-integrity 111 | 112 | # dotenv environment variables file 113 | .env 114 | .env.test 115 | 116 | # parcel-bundler cache (https://parceljs.org/) 117 | .cache 118 | 119 | # next.js build output 120 | .next 121 | 122 | # nuxt.js build output 123 | .nuxt 124 | 125 | # rollup.js default build output 126 | dist/ 127 | 128 | # Uncomment the public line if your project uses Gatsby 129 | # https://nextjs.org/blog/next-9-1#public-directory-support 130 | # https://create-react-app.dev/docs/using-the-public-folder/#docsNav 131 | # public 132 | 133 | # Storybook build outputs 134 | .out 135 | .storybook-out 136 | 137 | # vuepress build output 138 | .vuepress/dist 139 | 140 | # Serverless directories 141 | .serverless/ 142 | 143 | # FuseBox cache 144 | .fusebox/ 145 | 146 | # DynamoDB Local files 147 | .dynamodb/ 148 | 149 | # Temporary folders 150 | tmp/ 151 | temp/ 152 | 153 | # End of https://www.gitignore.io/api/node,macos 154 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | --- 4 | 5 | ### 10.1.0 6 | #### Breaking changes: 7 | 8 | #### Minor changes and bug fixes: 9 | * Libraries update 10 | * Changed node.js version to v14.21.3 to test latest versions of `typescript` using `jest` 11 | 12 | --- 13 | 14 | ### 10.0.0 15 | #### Breaking changes: 16 | * Typescript: `4.9.4` -> `5.0.2` 17 | * MySQL driver: `mysql2@2.3.3` -> `mysql2@3.2.0` 18 | 19 | #### Minor changes and bug fixes: 20 | * Added change log file 21 | 22 | --- 23 | 24 | ### 9.0.3 25 | #### Breaking changes: 26 | * NA 27 | #### Minor changes and bug fixes: 28 | * NA 29 | 30 | --- -------------------------------------------------------------------------------- /jest.config.js: 
-------------------------------------------------------------------------------- 1 | module.exports = { 2 | roots: ['/src/tests'], 3 | transform: { 4 | '^.+\\.tsx?$': 'ts-jest' 5 | }, 6 | testPathIgnorePatterns: [ 7 | 'environment.ts', 8 | 'testsData.ts', 9 | 'testRunner', 10 | ], 11 | verbose: true, 12 | }; 13 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sequelize-typescript-generator", 3 | "version": "12.0.1", 4 | "description": "Automatically generates typescript models compatible with sequelize-typescript library (https://www.npmjs.com/package/sequelize-typescript) directly from your source database.", 5 | "main": "build/index.js", 6 | "types": "build/index.d.ts", 7 | "bin": { 8 | "stg": "build/bin/cli.js" 9 | }, 10 | "scripts": { 11 | "clean": "rm -fr build", 12 | "lint": "eslint --fix --ext .ts output/*.ts", 13 | "build": "npm run clean && tsc", 14 | "audit": "npm audit", 15 | "docker-remove-containers": "./src/tests/integration/docker-remove-containers.sh", 16 | "docker-start-mysql": "./src/tests/integration/mysql/docker-start-mysql.sh", 17 | "docker-start-mariadb": "./src/tests/integration/mariadb/docker-start-mariadb.sh", 18 | "docker-start-postgres": "./src/tests/integration/postgres/docker-start-postgres.sh", 19 | "docker-start-mssql": "./src/tests/integration/mssql/docker-start-mssql.sh", 20 | "test-mysql": "npm run docker-remove-containers && npm run docker-start-mysql && jest src/tests/integration/mysql", 21 | "test-mariadb": "npm run docker-remove-containers && npm run docker-start-mariadb && jest src/tests/integration/mariadb", 22 | "test-postgres": "npm run docker-remove-containers && npm run docker-start-postgres && jest src/tests/integration/postgres", 23 | "test-mssql": "npm run docker-remove-containers && npm run docker-start-mssql && jest src/tests/integration/mssql", 24 | "test-sqlite": "npm 
run docker-remove-containers && jest src/tests/integration/sqlite", 25 | "test": "npm run test-sqlite && npm run test-mysql && npm run test-mariadb && npm run test-postgres && npm run test-mssql", 26 | "dev": "npx ts-node-dev src/bin/cli.ts -D mysql -u root -x mysql -d dbtest --dialect-options '{\"timezone\": \"local\"}' --case const:camel --clean --logs", 27 | "usage": "npx ts-node-dev src/bin/cli.ts --help" 28 | }, 29 | "author": "Ludovico Fabbri", 30 | "license": "ISC", 31 | "repository": "https://github.com/spinlud/sequelize-typescript-generator.git", 32 | "bugs": { 33 | "url": "https://github.com/spinlud/sequelize-typescript-generator.git/issues" 34 | }, 35 | "homepage": "https://github.com/spinlud/sequelize-typescript-generator.git#readme", 36 | "devDependencies": { 37 | "@types/bluebird": "^3.5.42", 38 | "@types/estree": "^1.0.6", 39 | "@types/jest": "^29.5.14", 40 | "@types/node": "^20.11.28", 41 | "@types/pluralize": "0.0.33", 42 | "@types/validator": "^13.12.2", 43 | "@types/yargs": "^17.0.33", 44 | "jest": "^29.7.0", 45 | "mariadb": "^3.4.0", 46 | "mysql2": "^3.12.0", 47 | "pg": "^8.13.1", 48 | "pg-hstore": "^2.3.4", 49 | "reflect-metadata": "^0.2.1", 50 | "sqlite3": "^5.1.7", 51 | "tedious": "^18.6.1", 52 | "ts-jest": "^29.2.5", 53 | "ts-node-dev": "^2.0.0", 54 | "wkx": "^0.5.0" 55 | }, 56 | "dependencies": { 57 | "@types/eslint": "^8.56.5", 58 | "@typescript-eslint/parser": "^7.2.0", 59 | "change-case": "^4.1.2", 60 | "eslint": "^8.57.0", 61 | "pluralize": "^8.0.0", 62 | "sequelize": "^6.37.5", 63 | "sequelize-typescript": "^2.1.6", 64 | "typescript": "^5.7.2", 65 | "yargs": "^17.7.2" 66 | }, 67 | "peerDependencies": { 68 | "typescript": "^5.0.4" 69 | }, 70 | "keywords": [ 71 | "sequelize", 72 | "sequelize-typescript", 73 | "sequelize-typescript-generator", 74 | "sequelize-typescript-auto", 75 | "knex", 76 | "bookshelf", 77 | "typescript", 78 | "javascript", 79 | "sql", 80 | "mysql", 81 | "postgres", 82 | "mariadb", 83 | "mssql", 84 | "sqlite", 85 | 
"orm", 86 | "node", 87 | "npm" 88 | ] 89 | } 90 | -------------------------------------------------------------------------------- /src/bin/cli.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import yargs from 'yargs'; 4 | import { ModelBuilder } from '../builders'; 5 | import { 6 | defaultOutputDir, 7 | aliasesMap, 8 | validateArgs, 9 | buildConfig, 10 | buildDialect, 11 | } from './utils'; 12 | 13 | process.on('unhandledRejection', (reason, promise) => { 14 | console.error(reason, promise); 15 | process.exit(1); 16 | }); 17 | 18 | export const cli = async (): Promise => { 19 | let usage = `Usage: stg -D -d [database] -u [username] -x [password] `; 20 | usage += `-h [host] -p [port] -o [out-dir] -s [schema] -a [associations-file]`; 21 | usage += `-t [tables] -T [skip-tables] -V [no-views] -i [indices] -C [case] -S [storage] -L [lint-file] `; 22 | usage += `-l [ssl] -r [protocol] -n [dialect-options] -c [clean] -g [logs]`; 23 | 24 | const {argv} = yargs 25 | .usage(usage) 26 | .demand(['dialect']) 27 | .option('h', { 28 | alias: aliasesMap.HOST, 29 | string: true, 30 | describe: `Database IP/hostname`, 31 | }) 32 | .option('p', { 33 | alias: aliasesMap.PORT, 34 | number: true, 35 | describe: `Database port. Defaults: \n - MySQL/MariaDB: 3306 \n - Postgres: 5432 \n - MSSQL: 1433`, 36 | }) 37 | .option('d', { 38 | alias: aliasesMap.DATABASE, 39 | string: true, 40 | describe: `Database name`, 41 | }) 42 | .option('s', { 43 | alias: aliasesMap.SCHEMA, 44 | string: true, 45 | describe: `Schema name (Postgres only). 
Default: \n - public`, 46 | }) 47 | .option('D', { 48 | alias: aliasesMap.DIALECT, 49 | string: true, 50 | describe: `Dialect: \n - postgres \n - mysql \n - mariadb \n - sqlite \n - mssql`, 51 | }) 52 | .option('u', { 53 | alias: aliasesMap.USERNAME, 54 | string: true, 55 | describe: `Database username`, 56 | }) 57 | .option('x', { 58 | alias: aliasesMap.PASSWORD, 59 | string: true, 60 | describe: `Database password`, 61 | }) 62 | .option('t', { 63 | alias: aliasesMap.TABLES, 64 | string: true, 65 | describe: `Comma-separated names of tables to process`, 66 | }) 67 | .option('T', { 68 | alias: aliasesMap.SKIP_TABLES, 69 | string: true, 70 | describe: `Comma-separated names of tables to skip`, 71 | }) 72 | .option('i', { 73 | alias: aliasesMap.INDICES, 74 | boolean: true, 75 | describe: `Include index annotations in the generated models`, 76 | }) 77 | .option('o', { 78 | alias: aliasesMap.OUTPUT_DIR, 79 | string: true, 80 | describe: `Output directory. Default: \n - ${defaultOutputDir}`, 81 | }) 82 | .option('c', { 83 | alias: aliasesMap.OUTPUT_DIR_CLEAN, 84 | boolean: true, 85 | describe: `Clean output directory before running`, 86 | }) 87 | .option('m', { 88 | alias: aliasesMap.TIMESTAMPS, 89 | boolean: true, 90 | describe: `Add default timestamps to tables`, 91 | }) 92 | .option('C', { 93 | alias: aliasesMap.CASE, 94 | string: true, 95 | describe: `Transform tables and fields names with one of the following cases: 96 | - underscore 97 | - camel 98 | - upper 99 | - lower 100 | - pascal 101 | - const 102 | You can also specify a different case for model and columns using the following format: 103 | : 104 | `, 105 | }).option('S', { 106 | alias: aliasesMap.STORAGE, 107 | string: true, 108 | describe: `SQLite storage. 
Default: \n - memory`, 109 | }).option('L', { 110 | alias: aliasesMap.LINT_FILE, 111 | string: true, 112 | describe: `ES Lint file path`, 113 | }).option('l', { 114 | alias: aliasesMap.SSL, 115 | boolean: true, 116 | describe: `Enable SSL`, 117 | }).option('r', { 118 | alias: aliasesMap.PROTOCOL, 119 | string: true, 120 | describe: `Protocol used: Default: \n - tcp`, 121 | }).option('a', { 122 | alias: aliasesMap.ASSOCIATIONS_FILE, 123 | string: true, 124 | describe: `Associations file path`, 125 | }).option('g', { 126 | alias: aliasesMap.ENABLE_SEQUELIZE_LOGS, 127 | boolean: true, 128 | describe: `Enable Sequelize logs`, 129 | }).option('n', { 130 | alias: aliasesMap.DIALECT_OPTIONS, 131 | type: 'string', 132 | describe: `Dialect native options passed as json string.`, 133 | }).option('f', { 134 | alias: aliasesMap.DIALECT_OPTIONS_FILE, 135 | type: 'string', 136 | describe: `Dialect native options passed as json file path.`, 137 | }).option('R', { 138 | alias: aliasesMap.DISABLE_STRICT, 139 | boolean: true, 140 | describe: `Disable strict typescript class declaration.`, 141 | }).option('V', { 142 | alias: aliasesMap.DISABLE_VIEWS, 143 | boolean: true, 144 | describe: `Disable views generation. 
Available for: MySQL and MariaDB.`, 145 | }); 146 | 147 | validateArgs(argv); 148 | 149 | const config = buildConfig(argv); 150 | const dialect = buildDialect(argv); 151 | 152 | const builder = new ModelBuilder(config, dialect); 153 | await builder.build(); 154 | console.log(`All done!`); 155 | }; 156 | 157 | (async () => { 158 | await cli(); 159 | })(); 160 | -------------------------------------------------------------------------------- /src/bin/utils.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import fs from 'fs'; 3 | import { Dialect as DialectType } from 'sequelize'; 4 | import { Dialect } from '../dialects/Dialect'; 5 | 6 | import { 7 | DialectMySQL, 8 | DialectPostgres, 9 | DialectMSSQL, 10 | DialectMariaDB, 11 | DialectSQLite, 12 | } from '../dialects'; 13 | 14 | import { 15 | IConfig, 16 | TransformCases, 17 | TransformCase, 18 | TransformMap, 19 | TransformTarget 20 | } from '../config/IConfig'; 21 | 22 | export type ArgvType = { [key: string]: any }; 23 | 24 | export const defaultOutputDir = 'output-models'; 25 | 26 | export const aliasesMap = { 27 | HOST: 'host', 28 | PORT: 'port', 29 | DATABASE: 'database', 30 | DIALECT: 'dialect', 31 | SCHEMA: 'schema', 32 | USERNAME: 'username', 33 | PASSWORD: 'password', 34 | TABLES: 'tables', 35 | SKIP_TABLES: 'skip-tables', 36 | OUTPUT_DIR: 'out-dir', 37 | OUTPUT_DIR_CLEAN: 'clean', 38 | INDICES: 'indices', 39 | TIMESTAMPS: 'timestamps', 40 | CASE: 'case', 41 | STORAGE: 'storage', 42 | LINT_FILE: 'lint-file', 43 | SSL: 'ssl', 44 | PROTOCOL: 'protocol', 45 | ASSOCIATIONS_FILE: 'associations-file', 46 | ENABLE_SEQUELIZE_LOGS: 'logs', 47 | DIALECT_OPTIONS: 'dialect-options', 48 | DIALECT_OPTIONS_FILE: 'dialect-options-file', 49 | DISABLE_STRICT: 'no-strict', 50 | DISABLE_VIEWS: 'no-views', 51 | }; 52 | 53 | /** 54 | * Diplay error message and exit 55 | * @param {string} msg 56 | * @returns {void} 57 | */ 58 | export const error = (msg: 
string): void => { 59 | console.error('[ValidationError]', msg); 60 | process.exit(1); 61 | }; 62 | 63 | /** 64 | * Parse case argument 65 | * @param {string} arg 66 | * @returns { TransformCase | TransformMap } 67 | */ 68 | export const parseCase = (arg: string): TransformCase | TransformMap => { 69 | if (arg.includes(':')) { 70 | const tokens = arg.split(':'); 71 | const modelCase = tokens[0].toUpperCase() as TransformCase; 72 | const columnCase = tokens[1].toUpperCase() as TransformCase; 73 | 74 | return { 75 | [TransformTarget.MODEL]: modelCase, 76 | [TransformTarget.COLUMN]: columnCase 77 | }; 78 | } 79 | 80 | return arg.toUpperCase() as TransformCase; 81 | }; 82 | 83 | /** 84 | * Parse dialect options from json string 85 | * @param {string} json 86 | * @returns {object} Dialect options object 87 | */ 88 | const buildDialectOptionsFromString = (json: string): object => { 89 | let parsed: object; 90 | 91 | try { 92 | parsed = JSON.parse(json); 93 | } 94 | catch(err) { 95 | console.error(`Invalid json for argument --dialect-options`, err); 96 | process.exit(1); 97 | } 98 | 99 | return parsed; 100 | }; 101 | 102 | /** 103 | * Parse dialect options from json file 104 | * @param {string} path 105 | * @returns {object} Dialect options object 106 | */ 107 | const buildDialectOptionsFromFile = (path: string): object => { 108 | let content: string; 109 | let parsed: object; 110 | 111 | try { 112 | content = fs.readFileSync(path).toString(); 113 | } 114 | catch(err) { 115 | error(`Argument -f [--dialect-options-file] '${path}' is not a valid path`); 116 | } 117 | 118 | try { 119 | parsed = JSON.parse(content!); 120 | } 121 | catch(err) { 122 | console.error(`Invalid json for argument --dialect-options`, err); 123 | process.exit(1); 124 | } 125 | 126 | return parsed; 127 | }; 128 | 129 | /** 130 | * Build config object from parsed arguments 131 | * @param { [key: string]: any } argv 132 | * Returns {IConfig} 133 | */ 134 | export const buildConfig = (argv: ArgvType): 
IConfig => { 135 | const config: IConfig = { 136 | connection: { 137 | dialect: argv[aliasesMap.DIALECT] as DialectType, 138 | ...argv[aliasesMap.HOST] && { host: argv[aliasesMap.HOST] as string }, 139 | ...argv[aliasesMap.PORT] && { port: argv[aliasesMap.PORT] as number }, 140 | ...argv[aliasesMap.DATABASE] && { database: argv[aliasesMap.DATABASE] as string }, 141 | ...argv[aliasesMap.SCHEMA] && { schema: argv[aliasesMap.SCHEMA] as string }, 142 | ...argv[aliasesMap.USERNAME] && { username: argv[aliasesMap.USERNAME] as string }, 143 | ...argv[aliasesMap.PASSWORD] && { password: argv[aliasesMap.PASSWORD] as string }, 144 | ...argv[aliasesMap.SSL] && { ssl: true }, 145 | ...argv[aliasesMap.PROTOCOL] && { protocol: argv[aliasesMap.PROTOCOL] as string }, 146 | 147 | ...argv[aliasesMap.DIALECT] === 'mariadb' && { dialectOptions: { 148 | timezone: 'Etc/GMT-3', 149 | } 150 | }, 151 | 152 | ...argv[aliasesMap.DIALECT] === 'sqlite' && { 153 | storage: argv[aliasesMap.STORAGE] ?? 'memory', 154 | }, 155 | 156 | ...argv[aliasesMap.DIALECT_OPTIONS_FILE] && { 157 | dialectOptions: buildDialectOptionsFromFile(argv[aliasesMap.DIALECT_OPTIONS_FILE]), 158 | }, 159 | 160 | ...argv[aliasesMap.DIALECT_OPTIONS] && { 161 | dialectOptions: buildDialectOptionsFromString(argv[aliasesMap.DIALECT_OPTIONS]), 162 | }, 163 | 164 | logQueryParameters: true, 165 | logging: argv[aliasesMap.ENABLE_SEQUELIZE_LOGS], 166 | }, 167 | metadata: { 168 | ...argv[aliasesMap.TABLES] && { 169 | tables: (argv[aliasesMap.TABLES] as string) 170 | .split(',') 171 | .map(tableName => tableName.toLowerCase()) 172 | }, 173 | ...argv[aliasesMap.SKIP_TABLES] && { 174 | skipTables: (argv[aliasesMap.SKIP_TABLES] as string) 175 | .split(',') 176 | .map(tableName => tableName.toLowerCase()) 177 | }, 178 | indices: !!argv[aliasesMap.INDICES], 179 | timestamps: !!argv[aliasesMap.TIMESTAMPS], 180 | ...argv[aliasesMap.CASE] && { case: parseCase(argv[aliasesMap.CASE]) }, 181 | ...argv[aliasesMap.ASSOCIATIONS_FILE] && { 
associationsFile: argv[aliasesMap.ASSOCIATIONS_FILE] as string }, 182 | noViews: !!argv[aliasesMap.DISABLE_VIEWS], 183 | }, 184 | output: { 185 | outDir: argv[aliasesMap.OUTPUT_DIR] ? 186 | path.isAbsolute(argv[aliasesMap.OUTPUT_DIR] as string) ? 187 | argv[aliasesMap.OUTPUT_DIR] as string 188 | : path.join(process.cwd(), argv[aliasesMap.OUTPUT_DIR] as string) 189 | : path.join(process.cwd(), defaultOutputDir), 190 | clean: !!argv[aliasesMap.OUTPUT_DIR_CLEAN], 191 | }, 192 | strict: !(!!argv[aliasesMap.DISABLE_STRICT]), 193 | ...argv[aliasesMap.LINT_FILE] && { 194 | lintOptions: { 195 | configFile: argv[aliasesMap.LINT_FILE], 196 | fix: true, 197 | } 198 | }, 199 | }; 200 | 201 | return config; 202 | }; 203 | 204 | /** 205 | * Build dialect object from parsed arguments 206 | * @param { [key: string]: any } argv 207 | * Returns {Dialect} 208 | */ 209 | export const buildDialect = (argv: ArgvType): Dialect => { 210 | let dialect: Dialect; 211 | 212 | switch (argv[aliasesMap.DIALECT]) { 213 | case 'postgres': 214 | dialect = new DialectPostgres(); 215 | break; 216 | case 'mysql': 217 | dialect = new DialectMySQL(); 218 | break; 219 | case 'mariadb': 220 | dialect = new DialectMariaDB(); 221 | break; 222 | case 'sqlite': 223 | dialect = new DialectSQLite(); 224 | break; 225 | case 'mssql': 226 | dialect = new DialectMSSQL(); 227 | break; 228 | default: 229 | error(`Unknown dialect ${argv[aliasesMap.DIALECT]}`); 230 | } 231 | 232 | return dialect!; 233 | }; 234 | 235 | /** 236 | * Validate arguments 237 | * @param { [key: string]: any } argv 238 | * @returns {void} 239 | */ 240 | export const validateArgs = (argv: ArgvType): void => { 241 | // Validate dialect 242 | if (!Dialect.dialects.has(argv[aliasesMap.DIALECT])) { 243 | error(`Required argument -D must be one of (${Array.from(Dialect.dialects).join(', ')})`); 244 | } 245 | 246 | // Validate database 247 | if (argv[aliasesMap.DIALECT] !== 'sqlite' && !argv[aliasesMap.DATABASE]) { 248 | error(`Argument -d [database] 
is required for dialect ${argv[aliasesMap.DIALECT]}`); 249 | } 250 | 251 | // Validate port 252 | if (argv[aliasesMap.PORT] && (!Number.isInteger(argv[aliasesMap.PORT]) || argv[aliasesMap.PORT] <= 0)) { 253 | error(`Argument -p [port] must be a positive integer (${argv[aliasesMap.PORT]})`); 254 | } 255 | 256 | // Validate case 257 | if (argv[aliasesMap.CASE]) { 258 | if (argv[aliasesMap.CASE].includes(':')) { 259 | const tokens = argv[aliasesMap.CASE].split(':'); 260 | const modelCase = tokens[0].toUpperCase(); 261 | const columnCase = tokens[1].toUpperCase(); 262 | 263 | if (!TransformCases.has(modelCase)) { 264 | error(`Unknown case '${modelCase}': must be one of (${Array.from(TransformCases).join(', ').toLowerCase()})`); 265 | } 266 | 267 | if (!TransformCases.has(columnCase)) { 268 | error(`Unknown case '${columnCase}': must be one of (${Array.from(TransformCases).join(', ').toLowerCase()})`); 269 | } 270 | } 271 | else if (!TransformCases.has(argv[aliasesMap.CASE].toUpperCase())) { 272 | error(`Argument -c [case] must be one of (${Array.from(TransformCases).join(', ').toLowerCase()})`); 273 | } 274 | } 275 | 276 | // Validate lint file 277 | if (argv[aliasesMap.LINT_FILE]) { 278 | try { 279 | fs.accessSync(argv[aliasesMap.LINT_FILE]); 280 | } 281 | catch(err) { 282 | error(`Argument -L [lint-file] '${argv[aliasesMap.LINT_FILE]}' is not a valid path`); 283 | } 284 | } 285 | 286 | // Validate associations file 287 | if (argv[aliasesMap.ASSOCIATIONS_FILE]) { 288 | try { 289 | fs.accessSync(argv[aliasesMap.ASSOCIATIONS_FILE]); 290 | } 291 | catch(err) { 292 | error(`Argument -a [associations-file] '${argv[aliasesMap.ASSOCIATIONS_FILE]}' is not a valid path`); 293 | } 294 | } 295 | 296 | // TODO Validate schema if dialect is postgres ? 
297 | }; 298 | -------------------------------------------------------------------------------- /src/builders/Builder.ts: -------------------------------------------------------------------------------- 1 | import { IConfig } from '../config'; 2 | import { Dialect } from '../dialects/Dialect'; 3 | 4 | /** 5 | * @class Builder 6 | * @constructor 7 | * @param {IConfig} config 8 | * @param {Dialect} dialect 9 | */ 10 | export abstract class Builder { 11 | private _config: IConfig; 12 | private _dialect: Dialect; 13 | 14 | protected constructor(config: IConfig, dialect: Dialect) { 15 | this._config = config; 16 | this._dialect = dialect; 17 | 18 | // Default Postgres schema if not provided 19 | if (dialect.name === 'postgres' && !config.connection.schema) { 20 | config.connection.schema = 'public'; 21 | } 22 | } 23 | 24 | get config(): IConfig { 25 | return this._config; 26 | } 27 | 28 | set config(value: IConfig) { 29 | this._config = value; 30 | } 31 | 32 | get dialect(): Dialect { 33 | return this._dialect; 34 | } 35 | 36 | set dialect(value: Dialect) { 37 | this._dialect = value; 38 | } 39 | 40 | /** 41 | * Build files with the given configuration and dialect 42 | * @returns {Promise} 43 | */ 44 | abstract build(): Promise; 45 | } 46 | -------------------------------------------------------------------------------- /src/builders/index.ts: -------------------------------------------------------------------------------- 1 | export { ModelBuilder } from './ModelBuilder'; 2 | -------------------------------------------------------------------------------- /src/builders/utils.ts: -------------------------------------------------------------------------------- 1 | import * as ts from 'typescript'; 2 | 3 | const printer = ts.createPrinter({ 4 | newLine: ts.NewLineKind.LineFeed, 5 | }); 6 | 7 | /** 8 | * Returns string representation of typescript node 9 | * @param node 10 | * @returns {string} 11 | */ 12 | export const nodeToString = (node: ts.Node): string => { 13 | 
const sourceFile = ts.createSourceFile( 14 | `source.ts`, 15 | ``, 16 | ts.ScriptTarget.Latest, 17 | false, 18 | ts.ScriptKind.TS 19 | ); 20 | 21 | const sourceCode = printer.printNode(ts.EmitHint.Unspecified, node, sourceFile); 22 | 23 | // Typescript automatically escape non ASCII characters like 哈 or 😂. This is a workaround to render them properly. 24 | // Reference: https://github.com/microsoft/TypeScript/issues/36174 25 | return unescape(sourceCode.replace(/\\u/g, "%u")); 26 | }; 27 | 28 | /** 29 | * Generate named imports code (e.g. `import { Something, Else } from "module"`) 30 | * @param {string[]} importsSpecifier 31 | * @param {string} moduleSpecifier 32 | * @returns {string} Named import code 33 | */ 34 | export const generateNamedImports = (importsSpecifier: string[], moduleSpecifier: string): ts.ImportDeclaration => { 35 | return ts.factory.createImportDeclaration( 36 | undefined, 37 | ts.factory.createImportClause( 38 | false, 39 | undefined, 40 | ts.factory.createNamedImports( 41 | [ 42 | ...importsSpecifier 43 | .map(is => ts.factory.createImportSpecifier(false, undefined, ts.factory.createIdentifier(is))) 44 | ] 45 | ) 46 | ), 47 | ts.factory.createStringLiteral(moduleSpecifier) 48 | ); 49 | }; 50 | 51 | /** 52 | * Generate model export for index file 53 | * @param {string} modelFileName 54 | * @returns {ts.ExportDeclaration} 55 | */ 56 | export const generateIndexExport = (modelFileName: string): ts.ExportDeclaration => { 57 | return ts.factory.createExportDeclaration( 58 | undefined, 59 | false, 60 | undefined, 61 | ts.factory.createStringLiteral(`./${modelFileName}`) 62 | ); 63 | }; 64 | 65 | /** 66 | * Generate object literal decorator 67 | * @param {string} decoratorIdentifier 68 | * @param {[key: string]: any} props 69 | * @return {ts.Decorator} 70 | */ 71 | export const generateObjectLiteralDecorator = ( 72 | decoratorIdentifier: string, 73 | props: { [key: string]: any } 74 | ): ts.Decorator => { 75 | const _createPropertyAssignment = 
(propName: string, propValue: any): ts.PropertyAssignment => { 76 | let expression: ts.Expression; 77 | 78 | switch (typeof propValue) { 79 | case 'number': 80 | expression = ts.factory.createNumericLiteral(propValue); 81 | break; 82 | case 'string': 83 | if (propValue.startsWith('DataType.') || propValue.startsWith('Sequelize.')) { 84 | expression = ts.factory.createIdentifier(propValue); 85 | } 86 | else { 87 | expression = ts.factory.createStringLiteral(propValue); 88 | } 89 | break; 90 | case 'boolean': 91 | if (propValue) { 92 | expression = ts.factory.createTrue(); 93 | } 94 | else { 95 | expression = ts.factory.createFalse(); 96 | } 97 | break; 98 | default: 99 | expression = ts.factory.createIdentifier(propValue); 100 | } 101 | 102 | return ts.factory.createPropertyAssignment(propName, expression); 103 | } 104 | 105 | return ts.factory.createDecorator( 106 | ts.factory.createCallExpression( 107 | ts.factory.createIdentifier(decoratorIdentifier), 108 | undefined, 109 | [ 110 | ts.factory.createObjectLiteralExpression( 111 | [ 112 | ...Object.entries(props) 113 | .map(e => _createPropertyAssignment(e[0], e[1])) 114 | ] 115 | ) 116 | ] 117 | ) 118 | ); 119 | }; 120 | 121 | /** 122 | * Generate arrow decorator 123 | * @param {string} decoratorIdentifier 124 | * @param {string[]} arrowTargetIdentifiers 125 | * @param {object} objectLiteralProps 126 | * @returns {ts.Decorator} 127 | */ 128 | export const generateArrowDecorator = ( 129 | decoratorIdentifier: string, 130 | arrowTargetIdentifiers: string[], 131 | objectLiteralProps?: object 132 | ): ts.Decorator => { 133 | const argumentsArray: ts.Expression[] = arrowTargetIdentifiers.map(t => 134 | ts.factory.createArrowFunction( 135 | undefined, 136 | undefined, 137 | [], 138 | undefined, 139 | ts.factory.createToken(ts.SyntaxKind.EqualsGreaterThanToken), 140 | ts.factory.createIdentifier(t) 141 | ), 142 | ); 143 | 144 | objectLiteralProps && argumentsArray.push( 145 | ts.factory.createObjectLiteralExpression([ 
146 | ...Object.entries(objectLiteralProps).map(e => { 147 | let initializer: ts.Expression; 148 | 149 | switch (typeof e[1]) { 150 | case 'number': 151 | initializer = ts.factory.createNumericLiteral(e[1]); 152 | break; 153 | case 'boolean': 154 | initializer = e[1] ? ts.factory.createTrue() : ts.factory.createFalse(); 155 | break; 156 | default: 157 | initializer = ts.factory.createStringLiteral(e[1]); 158 | break; 159 | } 160 | 161 | return ts.factory.createPropertyAssignment(e[0], initializer); 162 | }), 163 | ]) 164 | ); 165 | 166 | return ts.factory.createDecorator( 167 | ts.factory.createCallExpression( 168 | ts.factory.createIdentifier(decoratorIdentifier), 169 | undefined, 170 | argumentsArray 171 | ) 172 | ); 173 | }; 174 | -------------------------------------------------------------------------------- /src/config/IConfig.ts: -------------------------------------------------------------------------------- 1 | import { Options } from 'sequelize'; 2 | import { ESLint } from 'eslint'; 3 | 4 | export type TransformCase = 'UPPER' | 'LOWER' | 'UNDERSCORE' | 'CAMEL' | 'PASCAL' | 'CONST'; 5 | 6 | export enum TransformTarget { 7 | MODEL = 'model', 8 | COLUMN = 'column' 9 | } 10 | 11 | export type TransformMap = { 12 | [key in TransformTarget]: TransformCase; 13 | } 14 | 15 | export type TransformFn = (value: string, target: TransformTarget) => string; 16 | 17 | export const TransformCases = new Set([ 18 | 'UPPER', 19 | 'LOWER', 20 | 'UNDERSCORE', 21 | 'CAMEL', 22 | 'PASCAL', 23 | 'CONST' 24 | ]); 25 | 26 | export interface IConfigMetadata { 27 | tables?: string[]; 28 | skipTables?: string[]; 29 | indices?: boolean; 30 | timestamps?: boolean; 31 | case?: TransformCase | TransformMap | TransformFn; 32 | associationsFile?: string; 33 | noViews?: boolean; 34 | } 35 | 36 | export interface IConfigOutput { 37 | clean?: boolean; // clean output dir before build 38 | outDir: string; // output directory 39 | } 40 | 41 | export interface IConfig { 42 | connection: Options; 
43 | metadata?: IConfigMetadata; 44 | output: IConfigOutput; 45 | lintOptions?: ESLint.Options; 46 | strict?: boolean; 47 | } 48 | -------------------------------------------------------------------------------- /src/config/index.ts: -------------------------------------------------------------------------------- 1 | export { IConfig } from './IConfig'; 2 | -------------------------------------------------------------------------------- /src/connection/createConnection.ts: -------------------------------------------------------------------------------- 1 | import { Options } from 'sequelize'; 2 | import { Sequelize } from 'sequelize-typescript'; 3 | 4 | /** 5 | * Create a new sequelize connection 6 | * @param {Options} options 7 | * @returns {Sequelize} 8 | */ 9 | export const createConnection = (options: Options): Sequelize => { 10 | return new Sequelize(options); 11 | }; 12 | -------------------------------------------------------------------------------- /src/connection/index.ts: -------------------------------------------------------------------------------- 1 | export * from './createConnection'; 2 | -------------------------------------------------------------------------------- /src/dialects/AssociationsParser.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import readline from 'readline'; 3 | 4 | const cardinalities = new Set([ 5 | '1:1', 6 | '1:N', 7 | 'N:N' 8 | ]); 9 | 10 | type AssociationRow = [ 11 | string, // cardinality 12 | string, // left key 13 | string, // right key 14 | string, // left table 15 | string, // right table 16 | string? 
// Accepted association cardinalities (see associations CSV format).
const cardinalities = new Set([
    '1:1',
    '1:N',
    'N:N'
]);

// One parsed CSV row: cardinality, left key, right key, left table,
// right table and, for N:N associations only, the join table.
type AssociationRow = [
    string,  // cardinality
    string,  // left key
    string,  // right key
    string,  // left table
    string,  // right table
    string?  // [join table]
];

/**
 * Validate a single parsed association row, throwing on the first problem found.
 * @param {AssociationRow} row - Row to validate.
 * @throws {Error} If the cardinality is unknown, any required field is missing,
 *  or an N:N association lacks a join table.
 */
const validateRow = (row: AssociationRow): void => {
    const [
        cardinality,
        leftKey,   // renamed from `leftkey` for consistency with the other fields
        rightKey,
        leftTable,
        rightTable,
        joinTable
    ] = row;

    if (!cardinalities.has(cardinality)) {
        throw new Error(`Invalid cardinality: must be one of (${Array.from(cardinalities).join(', ')}). Received ${cardinality}`);
    }

    if (!leftKey || !leftKey.length) {
        throw new Error(`Missing required leftKey in association row`);
    }

    if (!rightKey || !rightKey.length) {
        throw new Error(`Missing required rightKey in association row`);
    }

    if (!leftTable || !leftTable.length) {
        throw new Error(`Missing required leftTable in association row`);
    }

    if (!rightTable || !rightTable.length) {
        throw new Error(`Missing required rightTable in association row`);
    }

    // N:N associations go through a join table, so it becomes mandatory.
    if (cardinality === 'N:N' && (!joinTable || !joinTable.length)) {
        throw new Error(`Association N:N requires a joinTable in the association row`);
    }
};
(this.associationsMetadata) { 88 | return this.associationsMetadata; 89 | } 90 | 91 | const associationsMetadata: IAssociationsParsed = {}; 92 | 93 | const lines = fs.readFileSync(path) 94 | .toString() 95 | .split('\n') 96 | .filter(line => line.length); // Filter empty lines 97 | 98 | for (const line of lines) { 99 | const row = line 100 | .split(',') 101 | .map((t, i) => i === 0 ? t.toUpperCase() : t) // Capitalize cardinality 102 | .map(t => t.trim()) as AssociationRow; 103 | 104 | validateRow(row); 105 | 106 | const [ 107 | cardinality, 108 | leftKey, 109 | rightKey, 110 | leftModel, 111 | rightModel, 112 | joinModel 113 | ] = row; 114 | 115 | const [ 116 | leftCardinality, 117 | rightCardinality 118 | ] = cardinality.split(':'); 119 | 120 | // Add entry for left table 121 | if (!associationsMetadata[leftModel]) { 122 | associationsMetadata[leftModel] = { 123 | foreignKeys: [], 124 | associations: [], 125 | }; 126 | } 127 | 128 | // Add entry for right table 129 | if (!associationsMetadata[rightModel]) { 130 | associationsMetadata[rightModel] = { 131 | foreignKeys: [], 132 | associations: [], 133 | }; 134 | } 135 | 136 | // 1:1 and 1:N association 137 | if (cardinality !== 'N:N') { 138 | associationsMetadata[leftModel].associations.push({ 139 | associationName: rightCardinality === '1' ? 'HasOne' : 'HasMany', 140 | targetModel: rightModel, 141 | sourceKey: leftKey, 142 | }); 143 | 144 | associationsMetadata[rightModel].associations.push({ 145 | associationName: 'BelongsTo', 146 | targetModel: leftModel, 147 | }); 148 | 149 | associationsMetadata[rightModel].foreignKeys.push({ 150 | name: rightKey, 151 | targetModel: leftModel, 152 | }); 153 | } 154 | // N:N association 155 | else { 156 | // Add entry for join table 157 | if (!associationsMetadata[joinModel!]) { 158 | associationsMetadata[joinModel!] 
= { 159 | foreignKeys: [], 160 | associations: [], 161 | }; 162 | } 163 | 164 | associationsMetadata[leftModel].associations.push({ 165 | associationName: 'BelongsToMany', 166 | targetModel: rightModel, 167 | joinModel: joinModel, 168 | }); 169 | 170 | associationsMetadata[rightModel].associations.push({ 171 | associationName: 'BelongsToMany', 172 | targetModel: leftModel, 173 | joinModel: joinModel, 174 | }); 175 | 176 | associationsMetadata[joinModel!].foreignKeys.push({ 177 | name: leftKey, 178 | targetModel: leftModel 179 | }); 180 | 181 | associationsMetadata[joinModel!].foreignKeys.push({ 182 | name: rightKey, 183 | targetModel: rightModel 184 | }); 185 | } 186 | 187 | } 188 | 189 | // Cache result 190 | this.associationsMetadata = associationsMetadata; 191 | 192 | return this.associationsMetadata; 193 | } 194 | 195 | } 196 | -------------------------------------------------------------------------------- /src/dialects/Dialect.ts: -------------------------------------------------------------------------------- 1 | import { IndexType, IndexMethod, AbstractDataTypeConstructor } from 'sequelize'; 2 | import { Sequelize } from 'sequelize-typescript'; 3 | import { IConfig } from '../config'; 4 | import { createConnection } from "../connection"; 5 | import { AssociationsParser, IAssociationsParsed, IAssociationMetadata } from './AssociationsParser' 6 | import { caseTransformer } from './utils'; 7 | 8 | export interface ITablesMetadata { 9 | [tableName: string]: ITableMetadata; 10 | } 11 | 12 | export interface ITableMetadata { 13 | name: string; // Model name 14 | originName: string; // Database table name 15 | schema?: 'public' | string; // Postgres only 16 | timestamps?: boolean; 17 | columns: { 18 | [columnName: string]: IColumnMetadata; 19 | } 20 | associations?: IAssociationMetadata[]; 21 | comment?: string; 22 | } 23 | 24 | export interface IColumnMetadata { 25 | name: string; // Model field name 26 | originName: string; // Database column name 27 | type: 
// Metadata for a single index on a column.
export interface IIndexMetadata {
    name: string;
    type?: IndexType;
    unique?: boolean;
    using?: IndexMethod;
    collation?: string | null;
    seq?: number;
}

// Basic table descriptor returned by fetchTables.
export interface ITable {
    name: string;
    comment?: string;
}

type DialectName = 'postgres' | 'mysql' | 'mariadb' | 'sqlite' | 'mssql';

/**
 * Base class for all database dialects. Concrete subclasses implement the
 * dialect-specific metadata queries and type mappings.
 */
export abstract class Dialect {
    /**
     * Accepted dialects
     */
    public static dialects: Set<string> = new Set([
        'postgres',
        'mysql',
        'mariadb',
        'sqlite',
        'mssql',
    ]);

    /**
     * Dialect name
     */
    public name: DialectName;

    /**
     * @constructor
     * @param {DialectName} name
     * @protected
     */
    protected constructor(name: DialectName) {
        this.name = name;
    }

    /**
     * Map database data type to sequelize data type
     * @param {string} dbType
     * @returns {AbstractDataTypeConstructor}
     */
    public abstract mapDbTypeToSequelize(dbType: string): AbstractDataTypeConstructor;

    /**
     * Map database data type to javascript data type
     * @param {string} dbType
     * @returns {string}
     */
    public abstract mapDbTypeToJs(dbType: string): string;
    /**
     * Map database default values to Sequelize type (e.g. uuid() => DataType.UUIDV4).
     * @param {string} v
     * @returns {string}
     */
    public abstract mapDefaultValueToSequelize(v: string): string;

    /**
     * Fetch table names for the provided database/schema
     * @param {Sequelize} connection
     * @param {IConfig} config
     * @returns {Promise<ITable[]>}
     */
    protected abstract fetchTables(
        connection: Sequelize,
        config: IConfig
    ): Promise<ITable[]>;

    /**
     * Fetch columns metadata for the provided schema and table
     * @param {Sequelize} connection
     * @param {IConfig} config
     * @param {string} table
     * @returns {Promise<IColumnMetadata[]>}
     */
    protected abstract fetchColumnsMetadata(
        connection: Sequelize,
        config: IConfig,
        table: string
    ): Promise<IColumnMetadata[]>;

    /**
     * Fetch index metadata for the provided table and column
     * @param {Sequelize} connection
     * @param {IConfig} config
     * @param {string} table
     * @param {string} column
     * @returns {Promise<IIndexMetadata[]>}
     */
    protected abstract fetchColumnIndexMetadata(
        connection: Sequelize,
        config: IConfig,
        table: string,
        column: string
    ): Promise<IIndexMetadata[]>;

    /**
     * Build tables metadata for the specific dialect and schema.
     * Connects to the database, fetches table/column/index metadata, then
     * applies table filters, associations and case transformations from config.
     * @param {IConfig} config
     * @returns {Promise<ITablesMetadata>}
     */
    public async buildTablesMetadata(config: IConfig): Promise<ITablesMetadata> {
        let connection: Sequelize | undefined;
        const tablesMetadata: ITablesMetadata = {};

        try {
            connection = createConnection(config.connection);

            await connection.authenticate();

            let tables = await this.fetchTables(connection, config);

            // Apply filters: keep only requested tables, then drop skipped ones.
            // Comparison is done on the lowercased table name, so config lists
            // are expected to be lowercase.
            tables = tables
                .filter(({ name }) => {
                    if (config.metadata?.tables?.length) {
                        return config.metadata.tables.includes(name.toLowerCase());
                    }
                    else {
                        return true;
                    }
                }).filter(({ name }) => {
                    if (config.metadata?.skipTables?.length) {
                        return !(config.metadata.skipTables.includes(name.toLowerCase()));
                    }
                    else {
                        return true;
                    }
                });

            for (const { name: tableName, comment: tableComment } of tables) {
                const columnsMetadata = await this.fetchColumnsMetadata(connection, config, tableName);

                // Fetch indices metadata if required
                if (config.metadata?.indices) {
                    for (const column of columnsMetadata) {
                        column.indices = await this.fetchColumnIndexMetadata(connection, config, tableName, column.name);
                    }
                }

                const tableMetadata: ITableMetadata = {
                    originName: tableName,
                    name: tableName,
                    schema: config.connection.schema,
                    timestamps: config.metadata?.timestamps ?? false,
                    columns: {},
                    comment: tableComment ?? undefined,
                };

                for (const columnMetadata of columnsMetadata) {
                    tableMetadata.columns[columnMetadata.name] = columnMetadata;
                }

                tablesMetadata[tableMetadata.originName] = tableMetadata;
            }
        }
        catch(err) {
            // NOTE(review): any failure here terminates the whole host process,
            // which library consumers cannot catch — consider rethrowing instead.
            console.error(err);
            process.exit(1);
        }
        finally {
            connection && await connection.close();
        }

        // Apply associations if required
        if (config.metadata?.associationsFile) {
            const parsedAssociations = AssociationsParser.parse(config.metadata?.associationsFile);

            for (const [tableName, association] of Object.entries(parsedAssociations)) {
                if(!tablesMetadata[tableName]) {
                    console.warn('[WARNING]', `Associated table ${tableName} not found among (${Object.keys(tablesMetadata).join(', ')})`);
                    continue;
                }

                // Attach associations to table
                tablesMetadata[tableName].associations = association.associations;

                const { columns } = tablesMetadata[tableName];

                // Override foreign keys
                for (const { name: columnName, targetModel } of association.foreignKeys) {
                    if (!columns[columnName]) {
                        console.warn('[WARNING]', `Foreign key column ${columnName} not found among (${Object.keys(columns).join(', ')})`);
                        continue;
                    }

                    columns[columnName].foreignKey = {
                        name: columnName,
                        targetModel: targetModel
                    };
                }
            }
        }

        // Apply transformations if required
        if (config.metadata?.case) {
            for (const [tableName, tableMetadata] of Object.entries(tablesMetadata)) {
                tablesMetadata[tableName] = caseTransformer(tableMetadata, config.metadata.case);
            }
        }

        return tablesMetadata;
    }
// Row shape returned by the sys.indexes / sys.index_columns join in
// fetchColumnIndexMetadata.
interface IIndexMetadataMSSQL {
    column_id: number;
    ColumnName: string;
    data_space_id: number;
    ignore_dup_key: string;
    index_id: number;
    IndexName: string;
    is_included_column: boolean;
    is_primary_key: boolean;
    is_unique: boolean;
    is_unique_constraint: boolean;
    TableName: string;
    type: number;
    type_desc: string;
}

// Maps MSSQL column data types to the TypeScript/JavaScript types used for
// the generated model fields.
const jsDataTypesMap: { [key: string]: string } = {
    int: 'number',
    bigint: 'string',
    tinyint: 'number',
    smallint: 'number',
    numeric: 'number',
    decimal: 'number',
    float: 'number',
    real: 'number',
    money: 'number',
    smallmoney: 'number',
    char: 'string',
    nchar: 'string',
    varchar: 'string',
    nvarchar: 'string',
    text: 'string',
    ntext: 'string',
    date: 'string',
    datetime: 'Date',
    datetime2: 'Date',
    timestamp: 'Date',
    datetimeoffset: 'Date',
    time: 'Date',
    smalldatetime: 'string',
    bit: 'boolean',
    binary: 'Uint8Array',
    varbinary: 'Uint8Array',
    uniqueidentifier: 'string',
    xml: 'string',
    geography: 'object',
};

// Maps MSSQL column data types to sequelize-typescript DataType constructors.
const sequelizeDataTypesMap: { [key: string]: AbstractDataTypeConstructor } = {
    int: DataType.INTEGER,
    bigint: DataType.BIGINT,
    tinyint: DataType.INTEGER,
    smallint: DataType.INTEGER,
    numeric: DataType.DECIMAL,
    decimal: DataType.DECIMAL,
    float: DataType.FLOAT,
    real: DataType.REAL,
    money: DataType.STRING,
    smallmoney: DataType.STRING,
    char: DataType.STRING,
    nchar: DataType.STRING,
    varchar: DataType.STRING,
    nvarchar: DataType.STRING,
    text: DataType.STRING,
    ntext: DataType.STRING,
    date: DataType.DATEONLY,
    datetime: DataType.DATE,
    datetime2: DataType.DATE,
    timestamp: DataType.DATE,
    datetimeoffset: DataType.STRING,
    time: DataType.TIME,
    smalldatetime: DataType.DATE,
    bit: DataType.STRING,
    binary: DataType.STRING,
    varbinary: DataType.STRING,
    uniqueidentifier: DataType.STRING,
    xml: DataType.STRING,
    geography: DataType.GEOGRAPHY,
};

/**
 * Dialect for MSSQL
 * @class DialectMSSQL
 */
export class DialectMSSQL extends Dialect {

    constructor() {
        super('mssql');
    }

    /**
     * Map database data type to sequelize data type
     * @param {string} dbType
     * @returns {AbstractDataTypeConstructor}
     */
    public mapDbTypeToSequelize(dbType: string): AbstractDataTypeConstructor {
        return sequelizeDataTypesMap[dbType];
    }

    /**
     * Map database data type to javascript data type
     * @param {string} dbType
     * @returns {string}
     */
    public mapDbTypeToJs(dbType: string): string {
        return jsDataTypesMap[dbType];
    }
    /**
     * Map database default values to Sequelize type (e.g. uuid() => DataType.UUIDV4).
     * NOTE(review): the MSSQL dialect currently performs no mapping and returns
     * the raw default value unchanged.
     * @param {string} v
     * @returns {string}
     */
    public mapDefaultValueToSequelize(v: string): string {
        return v;
    }

    /**
     * Fetch table names for the provided database/schema.
     * Queries sysobjects for user tables (type 'u') and joins
     * sys.extended_properties to pick up the MS_Description table comment.
     * @param {Sequelize} connection
     * @param {IConfig} config
     * @returns {Promise<ITable[]>}
     */
    protected async fetchTables(
        connection: Sequelize,
        config: IConfig
    ): Promise<ITable[]> {
        const query = `
            SELECT
                t.name      AS [table_name],
                td.value    AS [table_comment]
            FROM sysobjects t
            INNER JOIN sysusers u
                ON u.uid = t.uid
            LEFT OUTER JOIN sys.extended_properties td
                ON td.major_id = t.id AND td.minor_id = 0 AND td.name = 'MS_Description'
            WHERE t.type = 'u';
        `;

        const tables: ITable[] = (await connection.query(
            query,
            {
                type: QueryTypes.SELECT,
                raw: true,
            }
        ) as ITableRow[]).map(({ table_name, table_comment }) => {
            const t: ITable = {
                name: table_name,
                comment: table_comment ?? undefined,
            };

            return t;
        });

        return tables;
    }
    /**
     * Fetch columns metadata for the provided schema and table.
     * Joins information_schema views with sysobjects/syscolumns to resolve
     * identity columns, primary key constraints and MS_Description comments.
     * NOTE(review): `database` and `table` are interpolated directly into the
     * SQL string; values come from the generator config, but bind parameters
     * would be safer.
     * @param {Sequelize} connection
     * @param {IConfig} config
     * @param {string} table
     * @returns {Promise<IColumnMetadata[]>}
     */
    protected async fetchColumnsMetadata(
        connection: Sequelize,
        config: IConfig,
        table: string
    ): Promise<IColumnMetadata[]> {
        const columnsMetadata: IColumnMetadata[] = [];

        const query = `
            SELECT
                c.*,
                CASE WHEN COLUMNPROPERTY(object_id(c.TABLE_SCHEMA +'.' + c.TABLE_NAME), c.COLUMN_NAME, 'IsIdentity') = 1 THEN 'YES' ELSE 'NO' END AS IS_IDENTITY,
                tc.CONSTRAINT_NAME,
                tc.CONSTRAINT_TYPE,
                ep.value AS [COLUMN_COMMENT]
            FROM information_schema.columns c
            LEFT OUTER JOIN information_schema.key_column_usage ku
                 ON c.TABLE_CATALOG = ku.TABLE_CATALOG AND c.TABLE_NAME = ku.TABLE_NAME AND
                    c.COLUMN_NAME = ku.COLUMN_NAME
            LEFT OUTER JOIN information_schema.table_constraints tc
                 ON c.TABLE_CATALOG = tc.TABLE_CATALOG AND c.TABLE_NAME = tc.TABLE_NAME AND
                    ku.CONSTRAINT_CATALOG = tc.CONSTRAINT_CATALOG AND ku.CONSTRAINT_NAME = tc.CONSTRAINT_NAME
            INNER JOIN sysobjects t
                ON c.TABLE_NAME = t.name AND t.type = 'u'
            INNER JOIN syscolumns sc
                ON sc.id = t.id AND sc.name = c.COLUMN_NAME
            LEFT OUTER JOIN sys.extended_properties ep
                ON ep.major_id = sc.id AND ep.minor_id = sc.colid AND ep.name = 'MS_Description'
            WHERE c.TABLE_CATALOG = N'${config.connection.database}' AND c.TABLE_NAME = N'${table}'
            ORDER BY c.ORDINAL_POSITION;
        `;

        const columns = await connection.query(
            query,
            {
                type: QueryTypes.SELECT,
                raw: true,
            }
        ) as IColumnMetadataMSSQL[];

        for (const column of columns) {
            // Unknown data type: warn but still emit the column (dataType is omitted).
            if (!this.mapDbTypeToSequelize(column.DATA_TYPE)) {
                warnUnknownMappingForDataType(column.DATA_TYPE);
            }

            const columnMetadata: IColumnMetadata = {
                name: column.COLUMN_NAME,
                originName: column.COLUMN_NAME,
                type: column.DATA_TYPE,
                typeExt: column.DATA_TYPE,
                ...this.mapDbTypeToSequelize(column.DATA_TYPE) && {
                    dataType: 'DataType.' +
                        this.mapDbTypeToSequelize(column.DATA_TYPE).key
                            .split(' ')[0], // avoids 'DOUBLE PRECISION' key to include PRECISION in the mapping
                },
                // A primary key column is reported as non-nullable regardless of IS_NULLABLE.
                allowNull: column.IS_NULLABLE.toUpperCase() === 'YES' &&
                    column.CONSTRAINT_TYPE?.toUpperCase() !== 'PRIMARY KEY',
                primaryKey: column.CONSTRAINT_TYPE?.toUpperCase() === 'PRIMARY KEY',
                autoIncrement: column.IS_IDENTITY === 'YES',
                indices: [],
                comment: column.COLUMN_COMMENT ?? undefined,
            };

            // Additional data type information (precision/length signatures)
            switch (column.DATA_TYPE) {
                case 'decimal':
                case 'numeric':
                case 'float':
                case 'double':
                    columnMetadata.dataType +=
                        generatePrecisionSignature(column.NUMERIC_PRECISION, column.NUMERIC_SCALE);
                    break;

                case 'datetime2':
                    columnMetadata.dataType += generatePrecisionSignature(column.DATETIME_PRECISION);
                    break;

                case 'char':
                case 'nchar':
                case 'varchar':
                case 'nvarchar':
                    columnMetadata.dataType += generatePrecisionSignature(column.CHARACTER_MAXIMUM_LENGTH);
                    break;
            }

            columnsMetadata.push(columnMetadata);
        }

        return columnsMetadata;
    }
    /**
     * Fetch index metadata for the provided table and column, from the
     * sys.indexes / sys.index_columns catalog views.
     * NOTE(review): `table` and `column` are interpolated directly into the
     * SQL string; values come from database metadata, but bind parameters
     * would be safer.
     * @param {Sequelize} connection
     * @param {IConfig} config
     * @param {string} table
     * @param {string} column
     * @returns {Promise<IIndexMetadata[]>}
     */
    protected async fetchColumnIndexMetadata(
        connection: Sequelize,
        config: IConfig,
        table: string,
        column: string
    ): Promise<IIndexMetadata[]> {
        const indicesMetadata: IIndexMetadata[] = [];

        const query = `
            SELECT
                   c.column_id,
                   OBJECT_NAME(i.[object_id]) TableName,
                   i.name                     IndexName,
                   c.name                     ColumnName,
                   ic.is_included_column,
                   i.index_id,
                   i.is_unique,
                   i.data_space_id,
                   i.ignore_dup_key,
                   i.is_primary_key,
                   i.is_unique_constraint,
                   i.type,
                   i.type_desc
            FROM sys.indexes i
                JOIN sys.index_columns ic
                    ON ic.object_id = i.object_id AND i.index_id = ic.index_id
                JOIN sys.columns c
                    ON ic.object_id = c.object_id AND ic.column_id = c.column_id
                JOIN sys.tables t
                    ON t.object_id = c.object_id
            WHERE t.object_id = object_id(N'${table}') AND c.name=N'${column}'
            ORDER BY ic.column_id;
        `;

        const indices = await connection.query(
            query,
            {
                type: QueryTypes.SELECT,
                raw: true,
            }
        ) as IIndexMetadataMSSQL[];

        // Only the index name and uniqueness are propagated to the model metadata.
        for (const index of indices) {
            indicesMetadata.push({
                name: index.IndexName,
                unique: index.is_unique,
            });
        }

        return indicesMetadata;
    }

}
// Maps MariaDB column data types to sequelize-typescript DataType constructors.
const sequelizeDataTypesMap: { [key: string]: AbstractDataTypeConstructor } = {
    bigint: DataType.BIGINT,
    int: DataType.INTEGER,
    smallint: DataType.SMALLINT,
    mediumint: DataType.MEDIUMINT,
    tinyint: DataType.TINYINT,
    decimal: DataType.DECIMAL,
    float: DataType.FLOAT,
    double: DataType.DOUBLE,
    bit: DataType.INTEGER,
    varchar: DataType.STRING,
    char: DataType.CHAR,
    text: DataType.STRING,
    tinytext: DataType.STRING,
    mediumtext: DataType.STRING,
    longtext: DataType.STRING,
    date: DataType.DATEONLY,
    datetime: DataType.DATE,
    time: DataType.TIME,
    timestamp: DataType.DATE,
    year: DataType.INTEGER,
    enum: DataType.ENUM,
    set: DataType.STRING,
    binary: DataType.BLOB,
    blob: DataType.BLOB,
    tinyblob: DataType.BLOB,
    mediumblob: DataType.BLOB,
    longblob: DataType.BLOB,
    point: DataType.GEOMETRY,
    multipoint: DataType.GEOMETRY,
    linestring: DataType.GEOMETRY,
    multilinestring: DataType.GEOMETRY,
    polygon: DataType.GEOMETRY,
    multipolygon: DataType.GEOMETRY,
    geometry: DataType.GEOMETRY,
    geometrycollection: DataType.GEOMETRY,
    json: DataType.JSON,
};

// Maps MariaDB column data types to the TypeScript/JavaScript types used for
// the generated model fields.
const jsDataTypesMap: { [key: string]: string } = {
    bigint: 'string',
    smallint: 'number',
    mediumint: 'number',
    tinyint: 'number',
    decimal: 'number',
    float: 'number',
    double: 'number',
    int: 'number',
    bit: 'number',
    varchar: 'string',
    char: 'string',
    mediumtext: 'string',
    tinytext: 'string',
    longtext: 'string',
    text: 'string',
    date: 'string',
    time: 'string',
    datetime: 'Date',
    timestamp: 'Date',
    year: 'number',
    enum: 'string',
    set: 'Array',
    binary: 'Uint8Array',
    blob: 'Uint8Array',
    tinyblob: 'Uint8Array',
    mediumblob: 'Uint8Array',
    longblob: 'Uint8Array',
    point: 'object',
    multipoint: 'object',
    linestring: 'object',
    multilinestring: 'object',
    polygon: 'object',
    multipolygon: 'object',
    geometry: 'object',
    geometrycollection: 'object',
    json: 'string',
};
longblob: 'Uint8Array',
    point: 'object',
    multipoint: 'object',
    linestring: 'object',
    multilinestring: 'object',
    polygon: 'object',
    multipolygon: 'object',
    geometry: 'object',
    geometrycollection: 'object',
    json: 'string',
};

// Raw database default expressions that map onto Sequelize built-in constants.
const defaultValuesMap: { [key: string]: string } = {
    'uuid()': 'DataType.UUIDV4',
    'CURRENT_TIMESTAMP': 'DataType.NOW',
};

/**
 * Dialect for MariaDB
 * @class DialectMariaDB
 */
export class DialectMariaDB extends Dialect {

    constructor() {
        super('mariadb');
    }

    /**
     * Map database data type to sequelize data type
     * @param {string} dbType
     * @returns {AbstractDataTypeConstructor} undefined when the type has no known mapping
     */
    public mapDbTypeToSequelize(dbType: string): AbstractDataTypeConstructor {
        return sequelizeDataTypesMap[dbType];
    }

    /**
     * Map database data type to javascript data type
     * @param {string} dbType
     * @returns {string}
     */
    public mapDbTypeToJs(dbType: string): string {
        return jsDataTypesMap[dbType];
    }

    /**
     * Map database default values to Sequelize type (e.g. uuid() => DataType.UUIDV4).
     * @param {string} v
     * @returns {string} the mapped Sequelize constant, or the input unchanged
     */
    public mapDefaultValueToSequelize(v: string): string {
        return defaultValuesMap.hasOwnProperty(v) ? defaultValuesMap[v] : v;
    }

    /**
     * Fetch table names for the provided database/schema
     * @param {Sequelize} connection
     * @param {IConfig} config
     * @returns {Promise<ITable[]>}
     */
    protected async fetchTables(
        connection: Sequelize,
        config: IConfig
    ): Promise<ITable[]> {
        const query = `
            SELECT
                table_name      AS table_name,
                table_comment   AS table_comment
            FROM information_schema.tables
            WHERE table_schema = '${config.connection.database}'
                ${config.metadata?.noViews ?
'AND table_type <> \'VIEW\'' : ''}; 189 | `; 190 | 191 | const tables: ITable[] = (await connection.query( 192 | query, 193 | { 194 | type: QueryTypes.SELECT, 195 | raw: true, 196 | } 197 | ) as ITableRow[]).map(({ table_name, table_comment }) => { 198 | const t: ITable = { 199 | name: table_name, 200 | comment: table_comment ?? undefined, 201 | }; 202 | 203 | return t; 204 | }); 205 | 206 | return tables; 207 | } 208 | 209 | /** 210 | * Fetch columns metadata for the provided schema and table 211 | * @param {Sequelize} connection 212 | * @param {IConfig} config 213 | * @param {string} table 214 | * @returns {Promise} 215 | */ 216 | protected async fetchColumnsMetadata( 217 | connection: Sequelize, 218 | config: IConfig, 219 | table: string 220 | ): Promise { 221 | const columnsMetadata: IColumnMetadata[] = []; 222 | 223 | const query = ` 224 | SELECT 225 | c.ORDINAL_POSITION, 226 | c.TABLE_SCHEMA, 227 | c.TABLE_NAME, 228 | c.COLUMN_NAME, 229 | c.DATA_TYPE, 230 | c.COLUMN_TYPE, 231 | c.CHARACTER_MAXIMUM_LENGTH, 232 | c.NUMERIC_PRECISION, 233 | c.NUMERIC_SCALE, 234 | c.DATETIME_PRECISION, 235 | c.IS_NULLABLE, 236 | c.COLUMN_KEY, 237 | c.EXTRA, 238 | c.COLUMN_COMMENT, 239 | t.TABLE_COMMENT 240 | FROM information_schema.columns c 241 | INNER JOIN information_schema.tables t 242 | ON c.TABLE_SCHEMA = t.TABLE_SCHEMA AND c.TABLE_NAME = t.TABLE_NAME 243 | WHERE c.TABLE_SCHEMA='${config.connection.database}' AND c.TABLE_NAME = '${table}' 244 | ORDER BY c.ORDINAL_POSITION; 245 | `; 246 | 247 | const columns = await connection.query( 248 | query, 249 | { 250 | type: QueryTypes.SELECT, 251 | raw: true, 252 | } 253 | ) as IColumnMetadataMariaDB[]; 254 | 255 | for (const column of columns) { 256 | // Unknown data type 257 | if (!this.mapDbTypeToSequelize(column.DATA_TYPE)) { 258 | warnUnknownMappingForDataType(column.DATA_TYPE); 259 | } 260 | 261 | const columnMetadata: IColumnMetadata = { 262 | name: column.COLUMN_NAME, 263 | originName: column.COLUMN_NAME, 264 | type: 
column.DATA_TYPE, 265 | typeExt: column.COLUMN_TYPE, 266 | ...this.mapDbTypeToSequelize(column.DATA_TYPE) && { dataType: 'DataType.' + 267 | this.mapDbTypeToSequelize(column.DATA_TYPE).key 268 | .split(' ')[0], // avoids 'DOUBLE PRECISION' key to include PRECISION in the mapping 269 | }, 270 | allowNull: column.IS_NULLABLE === 'YES', 271 | primaryKey: column.COLUMN_KEY === 'PRI', 272 | autoIncrement: column.EXTRA === 'auto_increment', 273 | indices: [], 274 | comment: column.COLUMN_COMMENT, 275 | }; 276 | 277 | // Additional data type informations 278 | switch (column.DATA_TYPE) { 279 | case 'decimal': 280 | case 'numeric': 281 | case 'float': 282 | case 'double': 283 | columnMetadata.dataType += 284 | generatePrecisionSignature(column.NUMERIC_PRECISION, column.NUMERIC_SCALE); 285 | break; 286 | 287 | case 'datetime': 288 | case 'timestamp': 289 | columnMetadata.dataType += generatePrecisionSignature(column.DATETIME_PRECISION); 290 | break; 291 | 292 | case 'char': 293 | case 'varchar': 294 | columnMetadata.dataType += generatePrecisionSignature(column.CHARACTER_MAXIMUM_LENGTH); 295 | break; 296 | } 297 | 298 | // ENUM: add values to data type -> DataType.ENUM('v1', 'v2') 299 | if (column.DATA_TYPE === 'enum') { 300 | columnMetadata.dataType += columnMetadata.typeExt.match(/\(.*\)/)![0]; 301 | } 302 | 303 | columnsMetadata.push(columnMetadata); 304 | } 305 | 306 | return columnsMetadata; 307 | } 308 | 309 | /** 310 | * Fetch index metadata for the provided table and column 311 | * @param {Sequelize} connection 312 | * @param {IConfig} config 313 | * @param {string} table 314 | * @param {string} column 315 | * @returns {Promise} 316 | */ 317 | protected async fetchColumnIndexMetadata( 318 | connection: Sequelize, 319 | config: IConfig, 320 | table: string, 321 | column: string 322 | ): Promise { 323 | const indicesMetadata: IIndexMetadata[] = []; 324 | 325 | const query = ` 326 | SELECT * 327 | FROM information_schema.statistics s 328 | WHERE TABLE_SCHEMA = 
'${config.connection.database}' AND TABLE_NAME = '${table}' 329 | AND COLUMN_NAME = '${column}'; 330 | `; 331 | 332 | const indices = await connection.query( 333 | query, 334 | { 335 | type: QueryTypes.SELECT, 336 | raw: true, 337 | } 338 | ) as IIndexMetadataMariaDB[]; 339 | 340 | for (const index of indices) { 341 | indicesMetadata.push({ 342 | name: index.INDEX_NAME!, 343 | using: index.INDEX_TYPE!, 344 | collation: index.COLLATION, 345 | seq: index.SEQ_IN_INDEX!, 346 | unique: index.NON_UNIQUE === 0, 347 | }); 348 | } 349 | 350 | return indicesMetadata; 351 | } 352 | } 353 | -------------------------------------------------------------------------------- /src/dialects/DialectMySQL.ts: -------------------------------------------------------------------------------- 1 | import {QueryTypes, AbstractDataTypeConstructor, IndexMethod, col} from 'sequelize'; 2 | import { Sequelize, DataType } from 'sequelize-typescript'; 3 | import { IConfig } from '../config'; 4 | import { IColumnMetadata, Dialect, IIndexMetadata, ITable } from './Dialect'; 5 | import { warnUnknownMappingForDataType, generatePrecisionSignature } from './utils'; 6 | 7 | interface ITableRow { 8 | table_name: string; 9 | table_comment?: string; 10 | } 11 | 12 | interface IColumnMetadataMySQL { 13 | TABLE_CATALOG: string; 14 | TABLE_SCHEMA: string; 15 | TABLE_NAME: string; 16 | COLUMN_NAME: string; 17 | ORDINAL_POSITION?: number; 18 | IS_NULLABLE: string; 19 | DATA_TYPE: string; 20 | CHARACTER_MAXIMUM_LENGTH: number; 21 | CHARACTER_OCTET_LENGTH?: string; 22 | NUMERIC_PRECISION?: number; 23 | NUMERIC_SCALE?: number; 24 | DATETIME_PRECISION?: string; 25 | CHARACTER_SET_NAME?: string; 26 | COLLATION_NAME?: string; 27 | COLUMN_TYPE: string; 28 | COLUMN_KEY: string; 29 | EXTRA: string; 30 | COLUMN_DEFAULT: null | string; 31 | PRIVILEGES: string; 32 | COLUMN_COMMENT: string; 33 | TABLE_COMMENT: string; 34 | GENERATION_EXPRESSION: string; 35 | } 36 | 37 | interface IIndexMetadataMySQL { 38 | INDEX_NAME: string; 
// The name of the index. If the index is the primary key, the name is always PRIMARY. 39 | NON_UNIQUE: number | null; // 0 if the index cannot contain duplicates, 1 if it can 40 | INDEX_SCHEMA: string | null; // The name of the schema (database) to which the index belongs. 41 | SEQ_IN_INDEX: number | null; // The column sequence number in the index, starting with 1. 42 | COLLATION: string | null; // How the column is sorted in the index. This can have values A (ascending), D (descending), or NULL (not sorted). 43 | CARDINALITY: number | null; // An estimate of the number of unique values in the index. 44 | SUB_PART: string | null; // The index prefix. That is, the number of indexed characters if the column is only partly indexed, NULL if the entire column is indexed. 45 | PACKED: string | null;// Indicates how the key is packed. NULL if it is not. 46 | NULLABLE: string | null; // Contains YES if the column may contain NULL values and '' if not. 47 | INDEX_TYPE: IndexMethod | null; // The index method used (BTREE, FULLTEXT, HASH, RTREE). 
48 | COMMENT: string | null; 49 | INDEX_COMMENT: string | null; 50 | } 51 | 52 | const sequelizeDataTypesMap: { [key: string]: AbstractDataTypeConstructor } = { 53 | bigint: DataType.BIGINT, 54 | int: DataType.INTEGER, 55 | smallint: DataType.SMALLINT, 56 | mediumint: DataType.MEDIUMINT, 57 | tinyint: DataType.TINYINT, 58 | decimal: DataType.DECIMAL, 59 | float: DataType.FLOAT, 60 | double: DataType.DOUBLE, 61 | bit: DataType.INTEGER, 62 | varchar: DataType.STRING, 63 | char: DataType.CHAR, 64 | text: DataType.STRING, 65 | tinytext: DataType.STRING, 66 | mediumtext: DataType.STRING, 67 | longtext: DataType.STRING, 68 | date: DataType.DATEONLY, 69 | datetime: DataType.DATE, 70 | time: DataType.TIME, 71 | timestamp: DataType.DATE, 72 | year: DataType.INTEGER, 73 | enum: DataType.ENUM, 74 | set: DataType.STRING, 75 | binary: DataType.BLOB, 76 | blob: DataType.BLOB, 77 | tinyblob: DataType.BLOB, 78 | mediumblob: DataType.BLOB, 79 | longblob: DataType.BLOB, 80 | point: DataType.GEOMETRY, 81 | multipoint: DataType.GEOMETRY, 82 | linestring: DataType.GEOMETRY, 83 | multilinestring: DataType.GEOMETRY, 84 | polygon: DataType.GEOMETRY, 85 | multipolygon: DataType.GEOMETRY, 86 | geometry: DataType.GEOMETRY, 87 | geometrycollection: DataType.GEOMETRY, 88 | json: DataType.JSON, 89 | }; 90 | 91 | const jsDataTypesMap: { [key: string]: string } = { 92 | bigint: 'number', 93 | smallint: 'number', 94 | mediumint: 'number', 95 | tinyint: 'number', 96 | decimal: 'string', 97 | float: 'number', 98 | double: 'number', 99 | int: 'number', 100 | bit: 'number', 101 | varchar: 'string', 102 | char: 'string', 103 | mediumtext: 'string', 104 | tinytext: 'string', 105 | longtext: 'string', 106 | text: 'string', 107 | date: 'string', 108 | time: 'string', 109 | datetime: 'Date', 110 | timestamp: 'Date', 111 | year: 'number', 112 | enum: 'string', 113 | set: 'string', 114 | binary: 'Uint8Array', 115 | blob: 'Uint8Array', 116 | tinyblob: 'Uint8Array', 117 | mediumblob: 'Uint8Array', 118 | 
longblob: 'Uint8Array', 119 | point: 'object', 120 | multipoint: 'object', 121 | linestring: 'object', 122 | multilinestring: 'object', 123 | polygon: 'object', 124 | multipolygon: 'object', 125 | geometry: 'object', 126 | geometrycollection: 'object', 127 | json: 'object', 128 | }; 129 | 130 | const defaultValuesMap: { [key: string]: string } = { 131 | 'uuid()': 'DataType.UUIDV4', 132 | 'CURRENT_TIMESTAMP': 'DataType.NOW', 133 | }; 134 | 135 | const getDefaultValue = (columnDefault: string | null): any => { 136 | if (!columnDefault) { 137 | return null; 138 | } 139 | 140 | // Check if it is MySQL binary representation (e.g. b'100') 141 | const regex = new RegExp(/b\'([01]+)\'/g); 142 | const binaryStringCheck = regex.exec(columnDefault); 143 | 144 | if (binaryStringCheck) { 145 | const parsed = parseInt(binaryStringCheck[1], 2); 146 | 147 | if (parsed !== null) { 148 | return parsed; 149 | } 150 | } 151 | 152 | return columnDefault; 153 | } 154 | 155 | /** 156 | * Dialect for MySQL 157 | * @class DialectMySQL 158 | */ 159 | export class DialectMySQL extends Dialect { 160 | 161 | constructor() { 162 | super('mysql'); 163 | } 164 | 165 | /** 166 | * Map database data type to sequelize data type 167 | * @param {string} dbType 168 | * @returns {string} 169 | */ 170 | public mapDbTypeToSequelize(dbType: string): AbstractDataTypeConstructor { 171 | return sequelizeDataTypesMap[dbType]; 172 | } 173 | 174 | /** 175 | * Map database data type to javascript data type 176 | * @param {string} dbType 177 | * @returns {string} 178 | */ 179 | public mapDbTypeToJs(dbType: string): string { 180 | return jsDataTypesMap[dbType]; 181 | } 182 | 183 | /** 184 | * Map database default values to Sequelize type (e.g. uuid() => DataType.UUIDV4). 185 | * @param {string} v 186 | * @returns {string} 187 | */ 188 | public mapDefaultValueToSequelize(v: string): string { 189 | return defaultValuesMap.hasOwnProperty(v) ? 
defaultValuesMap[v] : v; 190 | } 191 | 192 | /** 193 | * Fetch table names for the provided database/schema 194 | * @param {Sequelize} connection 195 | * @param {IConfig} config 196 | * @returns {Promise} 197 | */ 198 | protected async fetchTables( 199 | connection: Sequelize, 200 | config: IConfig 201 | ): Promise { 202 | const query = ` 203 | SELECT 204 | table_name AS table_name, 205 | table_comment AS table_comment 206 | FROM information_schema.tables 207 | WHERE table_schema = '${config.connection.database}' 208 | ${config.metadata?.noViews ? 'AND table_type <> \'VIEW\'' : ''}; 209 | `; 210 | 211 | const tables: ITable[] = (await connection.query( 212 | query, 213 | { 214 | type: QueryTypes.SELECT, 215 | raw: true, 216 | } 217 | ) as ITableRow[]).map(({ table_name, table_comment }) => { 218 | const t: ITable = { 219 | name: table_name, 220 | comment: table_comment ?? undefined, 221 | }; 222 | 223 | return t; 224 | }); 225 | 226 | return tables; 227 | } 228 | 229 | /** 230 | * Fetch columns metadata for the provided schema and table 231 | * @param {Sequelize} connection 232 | * @param {IConfig} config 233 | * @param {string} table 234 | * @returns {Promise} 235 | */ 236 | protected async fetchColumnsMetadata( 237 | connection: Sequelize, 238 | config: IConfig, 239 | table: string 240 | ): Promise { 241 | const columnsMetadata: IColumnMetadata[] = []; 242 | 243 | const query = ` 244 | SELECT 245 | c.ORDINAL_POSITION, 246 | c.TABLE_SCHEMA, 247 | c.TABLE_NAME, 248 | c.COLUMN_NAME, 249 | c.DATA_TYPE, 250 | c.COLUMN_TYPE, 251 | c.CHARACTER_MAXIMUM_LENGTH, 252 | c.NUMERIC_PRECISION, 253 | c.NUMERIC_SCALE, 254 | c.DATETIME_PRECISION, 255 | c.IS_NULLABLE, 256 | c.COLUMN_KEY, 257 | c.EXTRA, 258 | c.COLUMN_DEFAULT, 259 | c.COLUMN_COMMENT, 260 | t.TABLE_COMMENT 261 | FROM information_schema.columns c 262 | INNER JOIN information_schema.tables t 263 | ON c.TABLE_SCHEMA = t.TABLE_SCHEMA AND c.TABLE_NAME = t.TABLE_NAME 264 | WHERE c.TABLE_SCHEMA = 
'${config.connection.database}' AND c.TABLE_NAME = '${table}' 265 | ORDER BY c.ORDINAL_POSITION; 266 | `; 267 | 268 | const columns = await connection.query( 269 | query, 270 | { 271 | type: QueryTypes.SELECT, 272 | raw: true, 273 | } 274 | ) as IColumnMetadataMySQL[]; 275 | 276 | for (const column of columns) { 277 | // Unknown data type 278 | if (!this.mapDbTypeToSequelize(column.DATA_TYPE)) { 279 | warnUnknownMappingForDataType(column.DATA_TYPE); 280 | } 281 | 282 | const columnMetadata: IColumnMetadata = { 283 | name: column.COLUMN_NAME, 284 | originName: column.COLUMN_NAME, 285 | type: column.DATA_TYPE, 286 | typeExt: column.COLUMN_TYPE, 287 | ...this.mapDbTypeToSequelize(column.DATA_TYPE) && { 288 | dataType: 'DataType.' + 289 | this.mapDbTypeToSequelize(column.DATA_TYPE).key 290 | .split(' ')[0], // avoids 'DOUBLE PRECISION' key to include PRECISION in the mapping 291 | }, 292 | allowNull: column.IS_NULLABLE === 'YES', 293 | primaryKey: column.COLUMN_KEY === 'PRI', 294 | autoIncrement: column.EXTRA === 'auto_increment', 295 | indices: [], 296 | comment: column.COLUMN_COMMENT, 297 | ...column.COLUMN_DEFAULT && { defaultValue: getDefaultValue(column.COLUMN_DEFAULT) }, 298 | }; 299 | 300 | // Additional data type informations 301 | switch (column.DATA_TYPE) { 302 | case 'decimal': 303 | case 'numeric': 304 | case 'float': 305 | case 'double': 306 | columnMetadata.dataType += 307 | generatePrecisionSignature(column.NUMERIC_PRECISION, column.NUMERIC_SCALE); 308 | break; 309 | 310 | case 'datetime': 311 | case 'timestamp': 312 | columnMetadata.dataType += generatePrecisionSignature(column.DATETIME_PRECISION); 313 | break; 314 | 315 | case 'char': 316 | case 'varchar': 317 | columnMetadata.dataType += generatePrecisionSignature(column.CHARACTER_MAXIMUM_LENGTH); 318 | break; 319 | } 320 | 321 | // ENUM: add values to data type -> DataType.ENUM('v1', 'v2') 322 | if (column.DATA_TYPE === 'enum') { 323 | columnMetadata.dataType += 
columnMetadata.typeExt.match(/\(.*\)/)![0]; 324 | } 325 | 326 | columnsMetadata.push(columnMetadata); 327 | } 328 | 329 | return columnsMetadata; 330 | } 331 | 332 | /** 333 | * Fetch index metadata for the provided table and column 334 | * @param {Sequelize} connection 335 | * @param {IConfig} config 336 | * @param {string} table 337 | * @param {string} column 338 | * @returns {Promise} 339 | */ 340 | protected async fetchColumnIndexMetadata( 341 | connection: Sequelize, 342 | config: IConfig, 343 | table: string, 344 | column: string 345 | ): Promise { 346 | const indicesMetadata: IIndexMetadata[] = []; 347 | 348 | const query = ` 349 | SELECT * 350 | FROM information_schema.statistics s 351 | WHERE TABLE_SCHEMA = '${config.connection.database}' AND TABLE_NAME = '${table}' 352 | AND COLUMN_NAME = '${column}'; 353 | `; 354 | 355 | const indices = await connection.query( 356 | query, 357 | { 358 | type: QueryTypes.SELECT, 359 | raw: true, 360 | } 361 | ) as IIndexMetadataMySQL[]; 362 | 363 | for (const index of indices) { 364 | indicesMetadata.push({ 365 | name: index.INDEX_NAME!, 366 | using: index.INDEX_TYPE!, 367 | collation: index.COLLATION, 368 | seq: index.SEQ_IN_INDEX!, 369 | unique: index.NON_UNIQUE === 0, 370 | }); 371 | } 372 | 373 | return indicesMetadata; 374 | } 375 | } 376 | -------------------------------------------------------------------------------- /src/dialects/DialectPostgres.ts: -------------------------------------------------------------------------------- 1 | import { QueryTypes, AbstractDataTypeConstructor } from 'sequelize'; 2 | import { Sequelize, DataType } from 'sequelize-typescript'; 3 | import { IConfig } from '../config'; 4 | import { IColumnMetadata, IIndexMetadata, Dialect, ITable } from './Dialect'; 5 | import { generatePrecisionSignature, warnUnknownMappingForDataType } from './utils'; 6 | 7 | interface ITableRow { 8 | table_name: string; 9 | table_comment?: string; 10 | } 11 | 12 | interface IColumnMetadataPostgres { 13 | 
is_sequence: boolean; 14 | is_primary: boolean; 15 | table_catalog: string; 16 | table_schema: string; 17 | table_name: string; 18 | column_name: string; 19 | ordinal_position: number; 20 | column_default: string; 21 | is_nullable: string; 22 | data_type: string; 23 | character_maximum_length: number; 24 | character_octet_length: number; 25 | numeric_precision: number; 26 | numeric_precision_radix: number; 27 | numeric_scale: number; 28 | datetime_precision: number; 29 | interval_type: string; 30 | interval_precision: number; 31 | character_set_catalog: string; 32 | character_set_schema: string; 33 | character_set_name: string; 34 | collation_catalog: string; 35 | collation_schema: string; 36 | collation_name: string; 37 | domain_catalog: string; 38 | domain_schema: string; 39 | domain_name: string; 40 | udt_catalog: string; 41 | udt_schema: string; 42 | udt_name: string; 43 | scope_catalog: string; 44 | scope_schema: string; 45 | scope_name: string; 46 | maximum_cardinality: number; 47 | dtd_identifier: string; 48 | is_self_referencing: string; 49 | is_identity: string; 50 | identity_generation: string; 51 | identity_start: string; 52 | identity_increment: string; 53 | identity_maximum: string; 54 | identity_minimum: string; 55 | identity_cycle: string; 56 | is_generated: string; 57 | generation_expression: string; 58 | is_updatable: string; 59 | description: string | null; 60 | } 61 | 62 | interface IIndexMetadataPostgres { 63 | index_name: string; 64 | index_type: string; 65 | is_primary: boolean; 66 | is_unique: boolean; 67 | is_clustered: boolean; 68 | column_name: string; 69 | ordinal_position: string; 70 | } 71 | 72 | const sequelizeDataTypesMap: { [key: string]: AbstractDataTypeConstructor } = { 73 | int2: DataType.INTEGER, 74 | int4: DataType.INTEGER, 75 | int8: DataType.BIGINT, 76 | numeric: DataType.DECIMAL, 77 | float4: DataType.FLOAT, 78 | float8: DataType.DOUBLE, 79 | money: DataType.NUMBER, 80 | varchar: DataType.STRING, 81 | bpchar: DataType.STRING, 
82 | text: DataType.STRING, 83 | bytea: DataType.BLOB, 84 | timestamp: DataType.DATE, 85 | timestamptz: DataType.DATE, 86 | date: DataType.STRING, 87 | time: DataType.STRING, 88 | timetz: DataType.STRING, 89 | // interval: DataType.STRING, 90 | bool: DataType.BOOLEAN, 91 | point: DataType.GEOMETRY, 92 | line: DataType.GEOMETRY, 93 | lseg: DataType.GEOMETRY, 94 | box: DataType.GEOMETRY, 95 | path: DataType.GEOMETRY, 96 | polygon: DataType.GEOMETRY, 97 | circle: DataType.GEOMETRY, 98 | geometry: DataType.GEOMETRY, 99 | cidr: DataType.STRING, 100 | inet: DataType.STRING, 101 | macaddr: DataType.STRING, 102 | macaddr8: DataType.STRING, 103 | bit: DataType.STRING, 104 | varbit: DataType.STRING, 105 | uuid: DataType.UUID, 106 | xml: DataType.STRING, 107 | json: DataType.JSON, 108 | jsonb: DataType.JSONB, 109 | jsonpath: DataType.JSON, 110 | } 111 | 112 | const jsDataTypesMap: { [key: string]: string } = { 113 | int2: 'number', 114 | int4: 'number', 115 | int8: 'string', 116 | numeric: 'string', 117 | float4: 'number', 118 | float8: 'number', 119 | money: 'string', 120 | varchar: 'string', 121 | bpchar: 'string', 122 | text: 'string', 123 | bytea: 'Uint8Array', 124 | timestamp: 'Date', 125 | timestamptz: 'Date', 126 | date: 'string', 127 | time: 'string', 128 | timetz: 'string', 129 | interval: 'object', 130 | bool: 'boolean', 131 | point: 'object', 132 | line: 'object', 133 | lseg: 'object', 134 | box: 'object', 135 | path: 'object', 136 | polygon: 'object', 137 | circle: 'object', 138 | geometry: 'object', 139 | cidr: 'string', 140 | inet: 'string', 141 | macaddr: 'string', 142 | macaddr8: 'string', 143 | bit: 'string', 144 | varbit: 'string', 145 | uuid: 'string', 146 | xml: 'string', 147 | json: 'object', 148 | jsonb: 'object', 149 | jsonpath: 'object', 150 | } 151 | 152 | /** 153 | * Dialect for Postgres 154 | * @class DialectPostgres 155 | */ 156 | export class DialectPostgres extends Dialect { 157 | 158 | constructor() { 159 | super('postgres'); 160 | } 161 | 162 | 
/** 163 | * Map database data type to sequelize data type 164 | * @param {string} dbType 165 | * @returns {string} 166 | */ 167 | public mapDbTypeToSequelize(dbType: string): AbstractDataTypeConstructor { 168 | return sequelizeDataTypesMap[dbType]; 169 | } 170 | 171 | /** 172 | * Map database data type to javascript data type 173 | * @param {string} dbType 174 | * @returns {string} 175 | */ 176 | public mapDbTypeToJs(dbType: string): string { 177 | return jsDataTypesMap[dbType]; 178 | } 179 | 180 | /** 181 | * Map database default values to Sequelize type (e.g. uuid() => DataType.UUIDV4). 182 | * @param {string} v 183 | * @returns {string} 184 | */ 185 | public mapDefaultValueToSequelize(v: string): string { 186 | return v; 187 | } 188 | 189 | /** 190 | * Fetch table names for the provided database/schema 191 | * @param {Sequelize} connection 192 | * @param {IConfig} config 193 | * @returns {Promise} 194 | */ 195 | protected async fetchTables( 196 | connection: Sequelize, 197 | config: IConfig 198 | ): Promise { 199 | const query = ` 200 | SELECT 201 | t.table_name AS table_name, 202 | obj_description(pc.oid) AS table_comment 203 | FROM information_schema.tables t 204 | JOIN pg_class pc 205 | ON t.table_name = pc.relname 206 | WHERE t.table_schema='${config.connection.schema}' AND pc.relkind = 'r'; 207 | `; 208 | 209 | const tables: ITable[] = (await connection.query( 210 | query, 211 | { 212 | type: QueryTypes.SELECT, 213 | raw: true, 214 | } 215 | ) as ITableRow[]).map(({ table_name, table_comment }) => { 216 | const t: ITable = { 217 | name: table_name, 218 | comment: table_comment ?? 
undefined, 219 | }; 220 | 221 | return t; 222 | }); 223 | 224 | return tables; 225 | } 226 | 227 | /** 228 | * Fetch columns metadata for the provided schema and table 229 | * @param {Sequelize} connection 230 | * @param {IConfig} config 231 | * @param {string} table 232 | * @returns {Promise} 233 | */ 234 | protected async fetchColumnsMetadata( 235 | connection: Sequelize, 236 | config: IConfig, 237 | table: string 238 | ): Promise { 239 | const columnsMetadata: IColumnMetadata[] = []; 240 | 241 | const query = ` 242 | SELECT 243 | CASE WHEN (seq.sequence_name IS NOT NULL) THEN TRUE ELSE FALSE END AS is_sequence, 244 | EXISTS( -- primary key 245 | SELECT 246 | x.indisprimary 247 | FROM pg_attribute a 248 | LEFT OUTER JOIN pg_index x 249 | ON a.attnum = ANY (x.indkey) AND a.attrelid = x.indrelid 250 | WHERE a.attrelid = '${config.connection.schema}.\"${table}\"'::regclass AND a.attnum > 0 251 | AND c.ordinal_position = a.attnum AND x.indisprimary IS TRUE 252 | ) AS is_primary, 253 | c.*, 254 | pgd.description 255 | FROM information_schema.columns c 256 | INNER JOIN pg_catalog.pg_statio_all_tables as st 257 | ON c.table_schema = st.schemaname AND c.table_name = st.relname 258 | LEFT OUTER JOIN pg_catalog.pg_description pgd 259 | ON pgd.objoid = st.relid AND pgd.objsubid = c.ordinal_position 260 | LEFT OUTER JOIN ( -- Sequences (auto increment) metadata 261 | SELECT seqclass.relname AS sequence_name, 262 | pn.nspname AS schema_name, 263 | depclass.relname AS table_name, 264 | attrib.attname AS column_name 265 | FROM pg_class AS seqclass 266 | JOIN pg_sequence AS seq 267 | ON (seq.seqrelid = seqclass.relfilenode) 268 | JOIN pg_depend AS dep 269 | ON (seq.seqrelid = dep.objid) 270 | JOIN pg_class AS depclass 271 | ON (dep.refobjid = depclass.relfilenode) 272 | JOIN pg_attribute AS attrib 273 | ON (attrib.attnum = dep.refobjsubid AND attrib.attrelid = dep.refobjid) 274 | JOIN pg_namespace pn 275 | ON seqclass.relnamespace = pn.oid 276 | WHERE pn.nspname = 
'${config.connection.schema}' AND depclass.relname = '${table}' 277 | ) seq 278 | ON c.table_schema = seq.schema_name AND c.table_name = seq.table_name AND 279 | c.column_name = seq.column_name 280 | WHERE c.table_schema = '${config.connection.schema}' AND c.table_name = '${table}' 281 | ORDER BY c.ordinal_position; 282 | `; 283 | 284 | const columns = await connection.query( 285 | query, 286 | { 287 | type: QueryTypes.SELECT, 288 | raw: true, 289 | } 290 | ) as IColumnMetadataPostgres[]; 291 | 292 | for (const column of columns) { 293 | // Unknown data type 294 | if (!this.mapDbTypeToSequelize(column.udt_name)) { 295 | warnUnknownMappingForDataType(column.udt_name); 296 | } 297 | 298 | const columnMetadata: IColumnMetadata = { 299 | name: column.column_name, 300 | originName: column.column_name, 301 | type: column.udt_name, 302 | typeExt: column.data_type, 303 | ...this.mapDbTypeToSequelize(column.udt_name) && { 304 | dataType: 'DataType.' + 305 | this.mapDbTypeToSequelize(column.udt_name).key 306 | .split(' ')[0], // avoids 'DOUBLE PRECISION' key to include PRECISION in the mapping 307 | }, 308 | allowNull: column.is_nullable === 'YES' && !column.is_primary, 309 | primaryKey: column.is_primary, 310 | autoIncrement: column.is_sequence, 311 | indices: [], 312 | comment: column.description ?? 
undefined, 313 | }; 314 | if (column.column_default) { 315 | columnMetadata.defaultValue = `Sequelize.literal("${column.column_default.replace(/\"/g, '\\\"')}")`; 316 | } 317 | 318 | // Additional data type information 319 | switch (column.udt_name) { 320 | case 'decimal': 321 | case 'numeric': 322 | case 'float': 323 | case 'double': 324 | columnMetadata.dataType += 325 | generatePrecisionSignature(column.numeric_precision, column.numeric_scale); 326 | break; 327 | 328 | case 'timestamp': 329 | case 'timestampz': 330 | columnMetadata.dataType += generatePrecisionSignature(column.datetime_precision); 331 | break; 332 | 333 | case 'bpchar': 334 | case 'varchar': 335 | columnMetadata.dataType += generatePrecisionSignature(column.character_maximum_length); 336 | break; 337 | } 338 | 339 | columnsMetadata.push(columnMetadata); 340 | } 341 | 342 | return columnsMetadata; 343 | } 344 | 345 | /** 346 | * Fetch index metadata for the provided table and column 347 | * @param {Sequelize} connection 348 | * @param {IConfig} config 349 | * @param {string} table 350 | * @param {string} column 351 | * @returns {Promise} 352 | */ 353 | protected async fetchColumnIndexMetadata( 354 | connection: Sequelize, 355 | config: IConfig, 356 | table: string, 357 | column: string 358 | ): Promise { 359 | const indicesMetadata: IIndexMetadata[] = []; 360 | 361 | const query = ` 362 | SELECT pc.relname AS index_name, 363 | am.amname AS index_type, 364 | a.attname AS column_name, 365 | a.attnum AS ordinal_position, 366 | x.indisprimary AS is_primary, 367 | x.indisunique AS is_unique, 368 | x.indisclustered AS is_clustered 369 | FROM pg_attribute a 370 | INNER JOIN pg_index x 371 | ON a.attnum = ANY (x.indkey) AND a.attrelid = x.indrelid 372 | INNER JOIN pg_class pc 373 | ON x.indexrelid = pc.oid 374 | INNER JOIN pg_am am 375 | ON pc.relam = am.oid 376 | WHERE a.attrelid = '${config.connection.schema}.\"${table}\"'::regclass AND a.attnum > 0 377 | AND a.attname = '${column}'; 378 | `; 379 | 380 
| const indices = await connection.query( 381 | query, 382 | { 383 | type: QueryTypes.SELECT, 384 | raw: true, 385 | } 386 | ) as IIndexMetadataPostgres[]; 387 | 388 | for (const index of indices) { 389 | indicesMetadata.push({ 390 | name: index.index_name, 391 | using: index.index_type, 392 | unique: index.is_unique, 393 | }); 394 | } 395 | 396 | return indicesMetadata; 397 | } 398 | } 399 | -------------------------------------------------------------------------------- /src/dialects/DialectSQLite.ts: -------------------------------------------------------------------------------- 1 | import { QueryTypes, AbstractDataTypeConstructor, IndexMethod } from 'sequelize'; 2 | import { Sequelize, DataType } from 'sequelize-typescript'; 3 | import { IConfig } from '../config'; 4 | import { IColumnMetadata, Dialect, IIndexMetadata, ITable } from './Dialect'; 5 | import { warnUnknownMappingForDataType } from './utils'; 6 | 7 | interface ITableRow { 8 | table_name: string; 9 | table_comment?: string; 10 | } 11 | 12 | interface IColumnMetadataSQLite { 13 | cid: number; 14 | dflt_value: any; 15 | name: string; 16 | notnull: number; 17 | pk: number; 18 | type: string; 19 | } 20 | 21 | interface IIndexMetadataSQLite { 22 | column_id: number, 23 | column_name: string, 24 | index_name: string, 25 | is_unique: number, 26 | origin: string, 27 | partial: number, 28 | seq: number, 29 | seq_number: number, 30 | } 31 | 32 | /** 33 | * Dialect for SQLite 34 | * @class DialectSQLite 35 | */ 36 | export class DialectSQLite extends Dialect { 37 | 38 | constructor() { 39 | super('sqlite'); 40 | } 41 | 42 | /** 43 | * Map database data type to sequelize data type 44 | * @param {string} dbType 45 | * @returns {string} 46 | */ 47 | public mapDbTypeToSequelize(dbType: string): AbstractDataTypeConstructor { 48 | // Affinity rules from https://www.sqlite.org/datatype3.html 49 | const dbTypeUpper = dbType.toUpperCase(); 50 | 51 | if (dbTypeUpper.includes('INT')) { 52 | return DataType.INTEGER; 53 | 
        }
        else if (dbTypeUpper.includes('CHAR') || dbTypeUpper.includes('CLOB') || dbTypeUpper.includes('TEXT')) {
            return DataType.STRING;
        }
        else if (dbTypeUpper.includes('BLOB')) {
            return DataType.BLOB;
        }
        else if (dbTypeUpper.includes('REAL') || dbTypeUpper.includes('FLOA') || dbTypeUpper.includes('DOUB')) {
            return DataType.REAL;
        }
        else {
            // SQLite column affinity fallback: anything else gets NUMERIC affinity
            return DataType.DECIMAL;
        }
    }

    /**
     * Map database data type to javascript data type
     * @param {string} dbType
     * @returns {string}
     */
    public mapDbTypeToJs(dbType: string): string {
        // Affinity rules from https://www.sqlite.org/datatype3.html
        const dbTypeUpper = dbType.toUpperCase();

        if (dbTypeUpper.includes('INT')) {
            return 'number';
        }
        else if (dbTypeUpper.includes('CHAR') || dbTypeUpper.includes('CLOB') || dbTypeUpper.includes('TEXT')) {
            return 'string';
        }
        else if (dbTypeUpper.includes('BLOB')) {
            return 'Uint8Array';
        }
        else {
            // REAL/NUMERIC affinity types map to a JS number
            return 'number';
        }
    }

    /**
     * Map database default values to Sequelize type (e.g. uuid() => DataType.UUIDV4).
93 | * @param {string} v 94 | * @returns {string} 95 | */ 96 | public mapDefaultValueToSequelize(v: string): string { 97 | return v; 98 | } 99 | 100 | /** 101 | * Fetch table names for the provided database/schema 102 | * @param {Sequelize} connection 103 | * @param {IConfig} config 104 | * @returns {Promise} 105 | */ 106 | protected async fetchTables( 107 | connection: Sequelize, 108 | config: IConfig 109 | ): Promise { 110 | const query = ` 111 | SELECT 112 | name AS table_name 113 | FROM sqlite_master 114 | WHERE type ='table' AND name NOT LIKE 'sqlite_%'; 115 | `; 116 | 117 | const tables: ITable[] = (await connection.query( 118 | query, 119 | { 120 | type: QueryTypes.SELECT, 121 | raw: true, 122 | } 123 | ) as ITableRow[]).map(({ table_name, table_comment }) => { 124 | const t: ITable = { 125 | name: table_name, 126 | comment: table_comment ?? undefined, 127 | }; 128 | 129 | return t; 130 | }); 131 | 132 | return tables; 133 | } 134 | 135 | protected async fetchColumnsMetadata( 136 | connection: Sequelize, 137 | config: IConfig, 138 | table: string 139 | ): Promise { 140 | const columnsMetadata: IColumnMetadata[] = []; 141 | 142 | const query = `PRAGMA main.table_info('${table}')`; 143 | 144 | const columns = await connection.query( 145 | query, 146 | { 147 | type: QueryTypes.SELECT, 148 | raw: true, 149 | } 150 | ) as IColumnMetadataSQLite[]; 151 | 152 | for (const column of columns) { 153 | // Unknown data type 154 | if (!this.mapDbTypeToSequelize(column.type)) { 155 | warnUnknownMappingForDataType(column.type); 156 | } 157 | 158 | const columnMetadata: IColumnMetadata = { 159 | name: column.name, 160 | originName: column.name, 161 | type: column.type, 162 | typeExt: column.type, 163 | ...this.mapDbTypeToSequelize(column.type) && { 164 | dataType: 'DataType.' 
+ 165 | this.mapDbTypeToSequelize(column.type).key 166 | .split(' ')[0], // avoids 'DOUBLE PRECISION' key to include PRECISION in the mapping 167 | }, 168 | allowNull: !!column.notnull, 169 | primaryKey: !!column.pk, 170 | autoIncrement: !!column.pk, 171 | indices: [], 172 | comment: '', // TODO 173 | }; 174 | 175 | columnsMetadata.push(columnMetadata); 176 | } 177 | 178 | return columnsMetadata; 179 | } 180 | 181 | protected async fetchColumnIndexMetadata( 182 | connection: Sequelize, 183 | config: IConfig, 184 | table: string, 185 | column: string 186 | ): Promise { 187 | const indicesMetadata: IIndexMetadata[] = []; 188 | 189 | const query = ` 190 | SELECT 191 | il.seq, 192 | ii.seqno AS seq_number, 193 | ii.cid AS column_id, 194 | ii.name as column_name, 195 | il.name AS index_name, 196 | il.\`unique\` AS is_unique, 197 | il.origin, 198 | il.partial 199 | FROM sqlite_master AS m, 200 | pragma_index_list(m.name) AS il, 201 | pragma_index_info(il.name) AS ii 202 | WHERE m.type = 'table' AND m.name = '${table}' AND ii.name = '${column}' 203 | ORDER BY il.seq; 204 | `; 205 | 206 | const indices = await connection.query( 207 | query, 208 | { 209 | type: QueryTypes.SELECT, 210 | raw: true, 211 | } 212 | ) as IIndexMetadataSQLite[]; 213 | 214 | for (const index of indices) { 215 | indicesMetadata.push({ 216 | name: index.index_name, 217 | seq: index.seq_number, 218 | unique: !!index.is_unique, 219 | }); 220 | } 221 | 222 | return indicesMetadata; 223 | } 224 | 225 | } 226 | -------------------------------------------------------------------------------- /src/dialects/index.ts: -------------------------------------------------------------------------------- 1 | export { DialectMySQL } from './DialectMySQL'; 2 | export { DialectPostgres } from './DialectPostgres'; 3 | export { DialectMSSQL } from './DialectMSSQL'; 4 | export { DialectMariaDB } from './DialectMariaDB'; 5 | export { DialectSQLite } from './DialectSQLite'; 6 | 
-------------------------------------------------------------------------------- /src/dialects/utils.ts: --------------------------------------------------------------------------------
import { ITableMetadata } from './Dialect';
import { TransformCase, TransformFn, TransformMap, TransformTarget } from '../config/IConfig';
import { camelCase, constantCase, pascalCase, snakeCase } from "change-case";

// A transformer maps an identifier (table/column name) to its cased form.
type CaseTransformer = (s: string) => string;

export const toUpperCase = (s: string) => s.toUpperCase();
export const toLowerCase = (s: string) => s.toLowerCase();

/**
 * Check if provided string is ASCII.
 * NOTE(review): the range \x00-\xFF actually accepts extended ASCII / Latin-1,
 * not just 7-bit ASCII (\x00-\x7F) — TODO confirm this is intended.
 * @param {string} s
 * @returns {boolean}
 */
export const isASCII = (s: string): boolean => (/^[\x00-\xFF]*$/).test(s);

/**
 * Get transformer for case. Unrecognized cases fall through to the
 * identity transformer (string returned unchanged).
 * @param {TransformCase} transformCase
 * @returns {CaseTransformer}
 */
const getTransformerForCase = (transformCase: TransformCase): CaseTransformer => {
    let transformer: CaseTransformer;

    switch(transformCase) {
        case "CAMEL":
            transformer = camelCase;
            break;
        case "UPPER":
            transformer = toUpperCase;
            break;
        case "LOWER":
            transformer = toLowerCase;
            break;
        case "PASCAL":
            transformer = pascalCase;
            break;
        case "UNDERSCORE":
            transformer = snakeCase;
            break;
        case "CONST":
            transformer = constantCase;
            break;
        default:
            transformer = (s: string) => s;
    }

    return transformer;
}

/**
 * Wrapper for case transformer.
Returns unprocessed string for non ASCII characters 53 | * @param {TransformCase | TransformMap} transformCase 54 | * @returns {TransformFn} 55 | */ 56 | export const transformerFactory = (transformCase: TransformCase | TransformMap): TransformFn => { 57 | let modelTransformer: CaseTransformer; 58 | let columnTransformer: CaseTransformer; 59 | 60 | if (typeof transformCase === 'string') { 61 | const transformer = getTransformerForCase(transformCase as TransformCase); 62 | modelTransformer = transformer; 63 | columnTransformer = transformer; 64 | } 65 | else { 66 | modelTransformer = getTransformerForCase(transformCase.model); 67 | columnTransformer = getTransformerForCase(transformCase.column); 68 | } 69 | 70 | return function(value: string, target: TransformTarget) { 71 | if (!isASCII(value)) { 72 | console.warn(`Unsupported case transformation for non ASCII characters:`, value); 73 | return value; 74 | } 75 | 76 | if (target === TransformTarget.MODEL) { 77 | return modelTransformer(value); 78 | } 79 | 80 | return columnTransformer(value); 81 | } 82 | }; 83 | 84 | /** 85 | * Get transformer 86 | * @param {TransformCase | TransformMap | TransformFn} transformCase 87 | * @returns {TransformFn} 88 | */ 89 | export const getTransformer = (transformCase: TransformCase | TransformMap | TransformFn): TransformFn => { 90 | if (typeof transformCase === 'function') { 91 | return transformCase; 92 | } 93 | 94 | return transformerFactory(transformCase); 95 | } 96 | 97 | /** 98 | * Transform ITableMetadata object using the provided case 99 | * @param {ITableMetadata} tableMetadata 100 | * @param {TransformCase} transformCase 101 | * @returns {ITableMetadata} 102 | */ 103 | export const caseTransformer = ( 104 | tableMetadata: ITableMetadata, 105 | transformCase: TransformCase | TransformMap | TransformFn 106 | ): ITableMetadata => { 107 | 108 | const transformer: TransformFn = getTransformer(transformCase); 109 | 110 | const transformed: ITableMetadata = { 111 | originName: 
tableMetadata.originName, 112 | name: transformer(tableMetadata.originName, TransformTarget.MODEL), 113 | timestamps: tableMetadata.timestamps, 114 | columns: {}, 115 | ...tableMetadata.associations && { 116 | associations: tableMetadata.associations.map(a => { 117 | a.targetModel = transformer(a.targetModel, TransformTarget.MODEL); 118 | 119 | if (a.joinModel) { 120 | a.joinModel = transformer(a.joinModel, TransformTarget.MODEL); 121 | } 122 | 123 | if (a.sourceKey) { 124 | a.sourceKey = transformer(a.sourceKey, TransformTarget.COLUMN); 125 | } 126 | 127 | return a; 128 | }) 129 | }, 130 | comment: tableMetadata.comment, 131 | }; 132 | 133 | for (const [columnName, columnMetadata] of Object.entries(tableMetadata.columns)) { 134 | 135 | if (columnMetadata.foreignKey) { 136 | const { name, targetModel } = columnMetadata.foreignKey; 137 | 138 | columnMetadata.foreignKey = { 139 | name: transformer(name, TransformTarget.COLUMN), 140 | targetModel: transformer(targetModel, TransformTarget.MODEL), 141 | } 142 | } 143 | 144 | transformed.columns[columnName] = Object.assign( 145 | {}, 146 | columnMetadata, 147 | { name: transformer(columnMetadata.originName, TransformTarget.COLUMN) } 148 | ); 149 | } 150 | 151 | return transformed; 152 | }; 153 | 154 | /** 155 | * Unknown mapping warning 156 | * @param {string} dataType 157 | * @returns {string} 158 | */ 159 | export const warnUnknownMappingForDataType = (dataType: string) => { 160 | console.warn(`[Warning]`, 161 | `Unknown data type mapping for type '${dataType}'. 162 | You should define the data type manually. 163 | `); 164 | }; 165 | 166 | /** 167 | * Generates precision signature 168 | * @param {Array} args 169 | * @returns {string} (80) or (10,4) or ... 170 | */ 171 | export const generatePrecisionSignature = (...args: Array): string => { 172 | const tokens = args.filter(arg => !!arg); 173 | 174 | return tokens.length ? 
`(${tokens.join(',')})` : ''; 175 | }; 176 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * from './config'; 2 | export * from './dialects'; 3 | export * from './builders'; 4 | -------------------------------------------------------------------------------- /src/lint/Linter.ts: -------------------------------------------------------------------------------- 1 | import { ESLint } from 'eslint'; 2 | import { eslintDefaultConfig } from './eslintDefaultConfig'; 3 | 4 | /** 5 | * @class Linter 6 | */ 7 | export class Linter { 8 | private engine: ESLint; 9 | 10 | constructor(options?: ESLint.Options) { 11 | if (options) { 12 | this.engine = new ESLint(options); 13 | } 14 | else { 15 | this.engine = new ESLint({ 16 | baseConfig: eslintDefaultConfig, 17 | fix: true, 18 | }); 19 | } 20 | } 21 | 22 | async lintFiles(paths: string[]): Promise { 23 | const report = await this.engine.lintFiles(paths); 24 | await ESLint.outputFixes(report); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/lint/eslintDefaultConfig.ts: -------------------------------------------------------------------------------- 1 | import { Linter } from 'eslint'; 2 | 3 | export const eslintDefaultConfig: Linter.Config = { 4 | parser: '@typescript-eslint/parser', 5 | parserOptions: { 6 | ecmaVersion: 2019, 7 | sourceType: 'module', 8 | }, 9 | plugins: [], 10 | extends: [], 11 | rules: { 12 | 'padded-blocks': ['error', { blocks: 'always', classes: 'always', switches: 'always' }], 13 | 'lines-between-class-members': ['error', 'always' ], 14 | 'object-curly-newline': ['error', { 15 | 'ObjectExpression': 'always', 16 | 'ObjectPattern': { 'multiline': true }, 17 | 'ImportDeclaration': { 'multiline': true, 'minProperties': 3 }, 18 | 'ExportDeclaration': { 'multiline': true, 'minProperties': 3 }, 19 | }], 20 | 
'object-property-newline': ['error'], 21 | 'indent': ['error', 'tab'], 22 | }, 23 | }; 24 | -------------------------------------------------------------------------------- /src/lint/index.ts: -------------------------------------------------------------------------------- 1 | export { Linter } from './Linter'; 2 | -------------------------------------------------------------------------------- /src/tests/environment.ts: -------------------------------------------------------------------------------- 1 | import { Options, Dialect as DialectType } from 'sequelize'; 2 | 3 | const setEnv = (): void => { 4 | if (!process.env['TEST_DB_HOST']) { 5 | process.env['TEST_DB_HOST'] = 'localhost'; 6 | } 7 | 8 | if (!process.env['TEST_DB_PORT']) { 9 | process.env['TEST_DB_PORT'] = '1234'; 10 | } 11 | 12 | if (!process.env['TEST_DB_DATABASE']) { 13 | process.env['TEST_DB_DATABASE'] = 'testdb'; 14 | } 15 | 16 | if (!process.env['TEST_DB_USERNAME']) { 17 | process.env['TEST_DB_USERNAME'] = 'sa'; 18 | } 19 | 20 | if (!process.env['TEST_DB_PASSWORD']) { 21 | process.env['TEST_DB_PASSWORD'] = 'Passw0rd88!'; 22 | } 23 | } 24 | 25 | /** 26 | * Build sequelize options from environment 27 | * @param {DialectType} dialect 28 | * @returns {Options} 29 | */ 30 | export const buildSequelizeOptions = (dialect: DialectType): Options => { 31 | setEnv(); 32 | 33 | let sequelizeOptions: Options = { 34 | dialect: dialect, 35 | host: process.env.TEST_DB_HOST, 36 | port: parseInt(process.env.TEST_DB_PORT!), 37 | database: process.env.TEST_DB_DATABASE, 38 | username: process.env.TEST_DB_USERNAME, 39 | password: process.env.TEST_DB_PASSWORD, 40 | logQueryParameters: true, 41 | logging: true, 42 | 43 | ...dialect === 'mariadb' && { dialectOptions: { 44 | timezone: 'Etc/GMT-3', 45 | } 46 | }, 47 | 48 | ...dialect === 'sqlite' && { 49 | storage: 'memory', 50 | } 51 | }; 52 | 53 | return sequelizeOptions; 54 | }; 55 | -------------------------------------------------------------------------------- 
/src/tests/integration/ITestMetadata.ts: -------------------------------------------------------------------------------- 1 | import { Dialect } from 'sequelize'; 2 | import { Sequelize } from 'sequelize-typescript'; 3 | 4 | interface ITestTable { 5 | name: string; 6 | dropQuery: string; 7 | createQueries: string[]; 8 | insertQueries?: string[]; 9 | } 10 | 11 | interface ITestSchema { 12 | name: string; 13 | createQuery: string; 14 | dropQuery: string; 15 | } 16 | 17 | type GetColumnNativeDataTypeFn = ( 18 | connection: Sequelize, 19 | schema: string, 20 | table: string, 21 | column: string 22 | ) => Promise; 23 | 24 | export interface ITestMetadata { 25 | name: string; 26 | dialect: Dialect; 27 | schema?: ITestSchema; 28 | testTables: ITestTable[]; 29 | testViews?: ITestTable[]; 30 | filterTables: string[]; 31 | filterSkipTables: string[]; 32 | dataTypes: { 33 | dataTypesTable: string; 34 | // Should return the native data type for a given column in a table 35 | getColumnNativeDataType: GetColumnNativeDataTypeFn; 36 | testValues: [string, any][]; 37 | }, 38 | associations: { 39 | leftTableOneToOne: string; // Left table 1:1 relation 40 | rightTableOneToOne: string; // Right table 1:1 relation 41 | leftTableOneToMany: string; // Left table 1:N relation 42 | rightTableOneToMany: string; // Right table 1:N relation 43 | leftTableManyToMany: string; // Left table N:N relation 44 | rightTableManyToMany: string; // Right table N:N relation 45 | }, 46 | } 47 | -------------------------------------------------------------------------------- /src/tests/integration/associations.csv: -------------------------------------------------------------------------------- 1 | 1:1, passport_id, passport_id, person, passport 2 | 1:N, race_id, race_id, races, units 3 | N:N, author_id, book_id, authors, books, authors_books 4 | -------------------------------------------------------------------------------- /src/tests/integration/docker-remove-containers.sh: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | # Stop and remove any running container 5 | if [[ "$OSTYPE" == "darwin"* ]]; then 6 | # MAC applies --no-run-if-empty by default 7 | docker ps -a | awk -F '[ ]+' 'NR>1 {print($1)}' | xargs -n1 docker stop | xargs -n1 docker rm --volumes 8 | else 9 | docker ps -a | awk -F '[ ]+' 'NR>1 {print($1)}' | xargs --no-run-if-empty -n1 docker stop | xargs --no-run-if-empty -n1 docker rm --volumes 10 | fi 11 | -------------------------------------------------------------------------------- /src/tests/integration/mariadb/docker-start-mariadb.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | DIR="${BASH_SOURCE%/*}" 5 | if [[ ! -d "$DIR" ]]; then DIR="$PWD"; fi 6 | . "$DIR/../test-env.sh" 7 | 8 | IMAGE_NAME="mariadb" 9 | CONTAINER_NAME="mariadb" 10 | 11 | if [[ -z "${DOCKER_MARIADB_TAG}" ]]; then 12 | IMAGE_TAG="11" 13 | else 14 | IMAGE_TAG="${DOCKER_MARIADB_TAG}" 15 | fi 16 | 17 | IMAGE_FULL_NAME="$IMAGE_NAME:$IMAGE_TAG" 18 | 19 | docker pull "$IMAGE_FULL_NAME" 20 | 21 | docker run -d --name $CONTAINER_NAME \ 22 | -e MYSQL_DATABASE="$TEST_DB_DATABASE" \ 23 | -e MYSQL_USER="$TEST_DB_USERNAME" \ 24 | -e MYSQL_ROOT_PASSWORD="$TEST_DB_PASSWORD" \ 25 | -e MYSQL_PASSWORD="$TEST_DB_PASSWORD" \ 26 | -p "$TEST_DB_PORT":3306 \ 27 | "$IMAGE_FULL_NAME" 28 | 29 | # Wait until database becomes online 30 | until docker logs --tail all ${CONTAINER_NAME} 2>&1 | grep -c "MariaDB init process done. Ready for start up." > /dev/null; do 31 | echo "Waiting database to become online..." 
32 | sleep 5 33 | done 34 | 35 | echo "Database online" 36 | -------------------------------------------------------------------------------- /src/tests/integration/mariadb/geometries.ts: -------------------------------------------------------------------------------- 1 | export const Point = JSON.parse(` 2 | {"type":"Point","coordinates":[1,1]} 3 | `); 4 | 5 | export const MultiPoint = JSON.parse(` 6 | {"type":"MultiPoint","coordinates":[[1,1],[2,2]]} 7 | `); 8 | 9 | export const LineString = JSON.parse(` 10 | {"type":"LineString","coordinates":[[0,0],[1,1],[2,2]]} 11 | `); 12 | 13 | export const MultiLineString = JSON.parse(` 14 | {"type":"MultiLineString","coordinates":[[[0,0],[1,1],[2,2]],[[0,0],[1,1],[2,2]]]} 15 | `); 16 | 17 | export const Polygon = JSON.parse(` 18 | {"type":"Polygon","coordinates":[[[0,0],[10,0],[10,10],[0,10],[0,0]],[[5,5],[7,5],[7,7],[5,7],[5,5]]]} 19 | `); 20 | 21 | export const MultiPolygon = JSON.parse(` 22 | {"type":"MultiPolygon","coordinates":[[[[0,0],[10,0],[10,10],[0,10],[0,0]],[[5,5],[7,5],[7,7],[5,7],[5,5]]],[[[0,0],[10,0],[10,10],[0,10],[0,0]],[[5,5],[7,5],[7,7],[5,7],[5,5]]]]} 23 | `); 24 | 25 | export const Geometry = JSON.parse(` 26 | {"type":"Point","coordinates":[1,1]} 27 | `); 28 | 29 | export const GeometryCollection = JSON.parse(` 30 | {"type":"GeometryCollection","geometries":[{"type":"Point","coordinates":[1,1]},{"type":"LineString","coordinates":[[0,0],[1,1],[2,2],[3,3],[4,4]]}]} 31 | `); 32 | 33 | 34 | -------------------------------------------------------------------------------- /src/tests/integration/mariadb/mariadb.test.ts: -------------------------------------------------------------------------------- 1 | import { Sequelize } from 'sequelize-typescript'; 2 | import { QueryTypes } from 'sequelize'; 3 | import { ITestMetadata } from '../ITestMetadata'; 4 | import { TestRunner } from '../TestRunner'; 5 | import * as geometries from './geometries'; 6 | import { 7 | DATA_TYPES_TABLE_NAME, 8 | DATA_TYPES_TABLE_DROP, 
9 | DATA_TYPES_TABLE_CREATES, 10 | INDICES_TABLE_NAME, 11 | INDICES_TABLE_DROP, 12 | INDICES_TABLE_CREATES, 13 | AUTHORS_TABLE_NAME, 14 | AUTHORS_TABLE_DROP, 15 | AUTHORS_TABLE_CREATES, 16 | AUTHORS_TABLE_INSERTS, 17 | BOOKS_TABLE_NAME, 18 | BOOKS_TABLE_DROP, 19 | BOOKS_TABLE_CREATES, 20 | BOOKS_TABLE_INSERTS, 21 | AUTHORS_BOOKS_TABLE_NAME, 22 | AUTHORS_BOOKS_TABLE_DROP, 23 | AUTHORS_BOOKS_TABLE_CREATES, 24 | AUTHORS_BOOKS_TABLE_INSERTS, 25 | RACES_TABLE_NAME, 26 | RACES_TABLE_DROP, 27 | RACES_TABLE_CREATES, 28 | RACES_TABLE_INSERTS, 29 | UNITS_TABLE_NAME, 30 | UNITS_TABLE_DROP, 31 | UNITS_TABLE_CREATES, 32 | UNITS_TABLE_INSERTS, 33 | PERSON_TABLE_NAME, 34 | PERSON_TABLE_DROP, 35 | PERSON_TABLE_CREATES, 36 | PERSON_TABLE_INSERTS, 37 | PASSPORT_TABLE_NAME, 38 | PASSPORT_TABLE_DROP, 39 | PASSPORT_TABLE_CREATES, 40 | PASSPORT_TABLE_INSERTS, 41 | AUTHORS_VIEW_CREATES, 42 | AUTHORS_VIEW_DROP, 43 | AUTHORS_VIEW_NAME, 44 | } from './queries'; 45 | 46 | interface INativeType { 47 | DATA_TYPE: string; 48 | data_type: string; 49 | } 50 | 51 | export const testMetadata: ITestMetadata = { 52 | name: 'MariaDB', 53 | dialect: 'mariadb', 54 | testTables: [ 55 | { 56 | name: DATA_TYPES_TABLE_NAME, 57 | createQueries: DATA_TYPES_TABLE_CREATES, 58 | dropQuery: DATA_TYPES_TABLE_DROP, 59 | }, 60 | { 61 | name: INDICES_TABLE_NAME, 62 | createQueries: INDICES_TABLE_CREATES, 63 | dropQuery: INDICES_TABLE_DROP, 64 | }, 65 | { 66 | name: AUTHORS_TABLE_NAME, 67 | createQueries: AUTHORS_TABLE_CREATES, 68 | dropQuery: AUTHORS_TABLE_DROP, 69 | insertQueries: AUTHORS_TABLE_INSERTS, 70 | }, 71 | { 72 | name: BOOKS_TABLE_NAME, 73 | createQueries: BOOKS_TABLE_CREATES, 74 | dropQuery: BOOKS_TABLE_DROP, 75 | insertQueries: BOOKS_TABLE_INSERTS, 76 | }, 77 | { 78 | name: AUTHORS_BOOKS_TABLE_NAME, 79 | createQueries: AUTHORS_BOOKS_TABLE_CREATES, 80 | dropQuery: AUTHORS_BOOKS_TABLE_DROP, 81 | insertQueries: AUTHORS_BOOKS_TABLE_INSERTS, 82 | }, 83 | { 84 | name: RACES_TABLE_NAME, 85 | createQueries: 
RACES_TABLE_CREATES, 86 | dropQuery: RACES_TABLE_DROP, 87 | insertQueries: RACES_TABLE_INSERTS, 88 | }, 89 | { 90 | name: UNITS_TABLE_NAME, 91 | createQueries: UNITS_TABLE_CREATES, 92 | dropQuery: UNITS_TABLE_DROP, 93 | insertQueries: UNITS_TABLE_INSERTS, 94 | }, 95 | { 96 | name: PERSON_TABLE_NAME, 97 | createQueries: PERSON_TABLE_CREATES, 98 | dropQuery: PERSON_TABLE_DROP, 99 | insertQueries: PERSON_TABLE_INSERTS, 100 | }, 101 | { 102 | name: PASSPORT_TABLE_NAME, 103 | createQueries: PASSPORT_TABLE_CREATES, 104 | dropQuery: PASSPORT_TABLE_DROP, 105 | insertQueries: PASSPORT_TABLE_INSERTS, 106 | }, 107 | ], 108 | testViews: [ 109 | { 110 | name: AUTHORS_VIEW_NAME, 111 | createQueries: AUTHORS_VIEW_CREATES, 112 | dropQuery: AUTHORS_VIEW_DROP, 113 | } 114 | ], 115 | filterTables: [ DATA_TYPES_TABLE_NAME ], 116 | filterSkipTables: [ INDICES_TABLE_NAME ], 117 | dataTypes: { 118 | dataTypesTable: DATA_TYPES_TABLE_NAME, 119 | async getColumnNativeDataType( 120 | connection: Sequelize, 121 | schema: string, 122 | table: string, 123 | column: string): Promise 124 | { 125 | const query = ` 126 | SELECT DATA_TYPE 127 | FROM information_schema.columns 128 | WHERE table_schema='${schema}' AND table_name='${table}' AND column_name='${column}'; 129 | `; 130 | 131 | const res = await connection.query(query, { 132 | type: QueryTypes.SELECT, 133 | raw: true, 134 | }) as INativeType[]; 135 | 136 | return res[0].DATA_TYPE ?? 
res[0].data_type; 137 | }, 138 | testValues: [ 139 | ['bigint', 100000000000000000], 140 | ['smallint', 32767], 141 | ['mediumint', 8388607], 142 | ['tinyint', 127], 143 | ['decimal', '99.999'], 144 | ['float', 66.78], 145 | ['double', 11.2345], 146 | ['int', 2147483647], 147 | ['varchar', 'Hello world'], 148 | ['char', 'a'], 149 | ['tinytext', 'xyz'], 150 | ['mediumtext', 'Voodoo Lady'], 151 | ['longtext', 'Supercalifragilisticexpialidocious'], 152 | ['text', 'Access denied'], 153 | ['date', '2020-01-01'], 154 | ['time', '23:59:59'], 155 | ['datetime', new Date()], 156 | ['timestamp', new Date()], 157 | ['year', new Date().getFullYear()], 158 | ['enum', 'BB'], 159 | ['set', 'X'], 160 | ['bit', 127], 161 | ['binary', Buffer.from('A')], 162 | ['blob', Buffer.from('Not authorized')], 163 | ['tinyblob', Buffer.from('xyz')], 164 | ['mediumblob', Buffer.from('Voodoo Lady')], 165 | ['longblob', Buffer.from('Supercalifragilisticexpialidocious')], 166 | ['point', geometries.Point], 167 | ['multipoint', geometries.MultiPoint], 168 | ['linestring', geometries.LineString], 169 | ['multilinestring', geometries.MultiLineString], 170 | ['polygon', geometries.Polygon], 171 | ['multipolygon', geometries.MultiPolygon], 172 | ['geometry', geometries.Geometry], 173 | // ['geometrycollection', geometries.GeometryCollection], 174 | ['json', JSON.stringify({key1: 'value1', key2: 'value2'})], 175 | ] 176 | }, 177 | associations: { 178 | leftTableOneToOne: PERSON_TABLE_NAME, 179 | rightTableOneToOne: PASSPORT_TABLE_NAME, 180 | leftTableOneToMany: RACES_TABLE_NAME, 181 | rightTableOneToMany: UNITS_TABLE_NAME, 182 | leftTableManyToMany: AUTHORS_TABLE_NAME, 183 | rightTableManyToMany: BOOKS_TABLE_NAME, 184 | }, 185 | }; 186 | 187 | const testRunner = new TestRunner(testMetadata); 188 | testRunner.run(); 189 | -------------------------------------------------------------------------------- /src/tests/integration/mariadb/queries.ts: 
-------------------------------------------------------------------------------- 1 | export const DATA_TYPES_TABLE_NAME = 'data_types'; 2 | export const DATA_TYPES_TABLE_DROP = `DROP TABLE IF EXISTS ${DATA_TYPES_TABLE_NAME} CASCADE`; 3 | export const DATA_TYPES_TABLE_CREATES = [ 4 | ` 5 | CREATE TABLE ${DATA_TYPES_TABLE_NAME} 6 | ( 7 | id INT AUTO_INCREMENT PRIMARY KEY, 8 | f_bit BIT(7) null, 9 | f_bigint BIGINT null, 10 | f_smallint SMALLINT null, 11 | f_mediumint MEDIUMINT null, 12 | f_tinyint TINYINT null, 13 | f_decimal DECIMAL(7, 3) null, 14 | f_float FLOAT(5, 3) null, 15 | f_double DOUBLE(7, 4) null, 16 | f_int INT null, 17 | f_varchar VARCHAR(80) null, 18 | f_char CHAR(10) null, 19 | f_tinytext TINYTEXT null, 20 | f_mediumtext MEDIUMTEXT null, 21 | f_longtext LONGTEXT null, 22 | f_text TEXT null, 23 | f_date DATE null, 24 | f_time TIME null, 25 | f_datetime DATETIME(6) null, 26 | f_timestamp TIMESTAMP(3) null, 27 | f_year YEAR null, 28 | f_enum ENUM ('AA', 'BB') null, 29 | f_set SET ('X', 'Y') null, 30 | f_binary BINARY null, 31 | f_blob BLOB null, 32 | f_tinyblob TINYBLOB null, 33 | f_mediumblob MEDIUMBLOB null, 34 | f_longblob LONGBLOB null, 35 | f_point POINT null, 36 | f_multipoint MULTIPOINT null, 37 | f_linestring LINESTRING null, 38 | f_multilinestring MULTILINESTRING null, 39 | f_polygon POLYGON null, 40 | f_multipolygon MULTIPOLYGON null, 41 | f_geometry GEOMETRY null, 42 | f_json JSON null 43 | ) CHARSET = 'latin1' 44 | `, 45 | ]; 46 | 47 | export const INDICES_TABLE_NAME = 'indices'; 48 | export const INDICES_TABLE_DROP = `DROP TABLE IF EXISTS ${INDICES_TABLE_NAME} CASCADE`; 49 | export const INDICES_TABLE_CREATES = [ 50 | ` 51 | CREATE TABLE ${INDICES_TABLE_NAME} 52 | ( 53 | id int auto_increment primary key, 54 | f_unique bigint null, 55 | f_multi_1 int not null, 56 | f_multi_2 varchar(80) null, 57 | CONSTRAINT indices_f_multi_1_uindex UNIQUE (f_multi_1), 58 | CONSTRAINT indices_f_unique_uindex UNIQUE (f_unique) 59 | ) CHARSET = 'latin1' 60 | `, 
61 | ` 62 | CREATE INDEX indices_f_multi_1_f_multi_2_index 63 | ON ${INDICES_TABLE_NAME} (f_multi_1, f_multi_2); 64 | `, 65 | ]; 66 | 67 | export const AUTHORS_TABLE_NAME = 'authors'; 68 | export const AUTHORS_TABLE_DROP = `DROP TABLE IF EXISTS ${AUTHORS_TABLE_NAME} CASCADE`; 69 | export const AUTHORS_TABLE_CREATES = [ 70 | ` 71 | CREATE TABLE ${AUTHORS_TABLE_NAME} 72 | ( 73 | author_id INT primary key, 74 | full_name VARCHAR(80) not null 75 | ); 76 | `, 77 | ]; 78 | export const AUTHORS_TABLE_INSERTS = [ 79 | `INSERT INTO ${AUTHORS_TABLE_NAME} VALUES (1, 'Isasc Asimov');`, 80 | `INSERT INTO ${AUTHORS_TABLE_NAME} VALUES (2, 'James Clavell');`, 81 | ]; 82 | 83 | export const BOOKS_TABLE_NAME = 'books'; 84 | export const BOOKS_TABLE_DROP = `DROP TABLE IF EXISTS ${BOOKS_TABLE_NAME} CASCADE`; 85 | export const BOOKS_TABLE_CREATES = [ 86 | ` 87 | CREATE TABLE ${BOOKS_TABLE_NAME} 88 | ( 89 | book_id INT PRIMARY KEY, 90 | title VARCHAR(80) not null 91 | ); 92 | `, 93 | ]; 94 | export const BOOKS_TABLE_INSERTS = [ 95 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (1, 'Prelude to Foundation');`, 96 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (2, 'The End of Eternity');`, 97 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (3, 'Shogun');`, 98 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (4, 'Galactic Shogun');`, 99 | ]; 100 | 101 | export const AUTHORS_BOOKS_TABLE_NAME = 'authors_books'; 102 | export const AUTHORS_BOOKS_TABLE_DROP = `DROP TABLE IF EXISTS ${AUTHORS_BOOKS_TABLE_NAME} CASCADE`; 103 | export const AUTHORS_BOOKS_TABLE_CREATES = [ 104 | ` 105 | CREATE TABLE ${AUTHORS_BOOKS_TABLE_NAME} 106 | ( 107 | author_id INT not null, 108 | book_id INT not null, 109 | PRIMARY KEY (author_id, book_id) 110 | ); 111 | `, 112 | ]; 113 | export const AUTHORS_BOOKS_TABLE_INSERTS = [ 114 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 1);`, 115 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 2);`, 116 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 4);`, 117 | `INSERT INTO 
${AUTHORS_BOOKS_TABLE_NAME} VALUES (2, 3);`, 118 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (2, 4);`, 119 | ]; 120 | 121 | export const RACES_TABLE_NAME = 'races'; 122 | export const RACES_TABLE_DROP = `DROP TABLE IF EXISTS ${RACES_TABLE_NAME} CASCADE`; 123 | export const RACES_TABLE_CREATES = [ 124 | ` 125 | CREATE TABLE ${RACES_TABLE_NAME} 126 | ( 127 | race_id INT PRIMARY KEY, 128 | race_name VARCHAR(80) NOT NULL 129 | ); 130 | `, 131 | ]; 132 | export const RACES_TABLE_INSERTS = [ 133 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(1, 'Orcs');`, 134 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(2, 'Humans');`, 135 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(3, 'Night Elves');`, 136 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(4, 'Undead');`, 137 | ]; 138 | 139 | export const UNITS_TABLE_NAME = 'units'; 140 | export const UNITS_TABLE_DROP = `DROP TABLE IF EXISTS ${UNITS_TABLE_NAME} CASCADE`; 141 | export const UNITS_TABLE_CREATES = [ 142 | ` 143 | CREATE TABLE ${UNITS_TABLE_NAME} 144 | ( 145 | unit_id INT PRIMARY KEY, 146 | unit_name VARCHAR(80) NOT NULL, 147 | race_id INT NOT NULL 148 | ); 149 | `, 150 | ]; 151 | export const UNITS_TABLE_INSERTS = [ 152 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(1, 'Tauren Warrior', 1);`, 153 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(2, 'Kodo Beast', 1);`, 154 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(3, 'Rifleman', 2);`, 155 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(4, 'Dryad', 3);`, 156 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(5, 'Archer', 3);`, 157 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(6, 'Ghoul', 4);`, 158 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(7, 'Frost Wyrm', 4);`, 159 | ]; 160 | 161 | export const PERSON_TABLE_NAME = 'person'; 162 | export const PERSON_TABLE_DROP = `DROP TABLE IF EXISTS ${PERSON_TABLE_NAME} CASCADE`; 163 | export const PERSON_TABLE_CREATES = [ 164 | ` 165 | CREATE TABLE ${PERSON_TABLE_NAME} 166 | ( 167 | person_id INT PRIMARY KEY, 168 | name VARCHAR(80) NOT NULL, 169 | passport_id INT NOT NULL 170 | ); 
171 | `, 172 | ]; 173 | export const PERSON_TABLE_INSERTS = [ 174 | `INSERT INTO ${PERSON_TABLE_NAME} VALUES(1, 'Arthas', 1);`, 175 | ]; 176 | 177 | export const PASSPORT_TABLE_NAME = 'passport'; 178 | export const PASSPORT_TABLE_DROP = `DROP TABLE IF EXISTS ${PASSPORT_TABLE_NAME} CASCADE`; 179 | export const PASSPORT_TABLE_CREATES = [ 180 | ` 181 | CREATE TABLE ${PASSPORT_TABLE_NAME} 182 | ( 183 | passport_id INT PRIMARY KEY, 184 | code VARCHAR(80) NOT NULL 185 | ); 186 | `, 187 | ]; 188 | export const PASSPORT_TABLE_INSERTS = [ 189 | `INSERT INTO ${PASSPORT_TABLE_NAME} VALUES(1, 'Frostmourne');`, 190 | ]; 191 | 192 | export const AUTHORS_VIEW_NAME = 'authors_view'; 193 | export const AUTHORS_VIEW_DROP = `DROP VIEW IF EXISTS ${AUTHORS_VIEW_NAME}`; 194 | export const AUTHORS_VIEW_CREATES = [ 195 | `CREATE OR REPLACE VIEW ${AUTHORS_VIEW_NAME} AS SELECT full_name FROM ${AUTHORS_TABLE_NAME}`, 196 | ]; 197 | -------------------------------------------------------------------------------- /src/tests/integration/mssql/docker-start-mssql.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | DIR="${BASH_SOURCE%/*}" 5 | if [[ ! -d "$DIR" ]]; then DIR="$PWD"; fi 6 | . 
"$DIR/../test-env.sh" 7 | 8 | IMAGE_NAME="mcr.microsoft.com/mssql/server" 9 | CONTAINER_NAME="mssql" 10 | 11 | if [[ -z "${DOCKER_MSSQL_TAG}" ]]; then 12 | IMAGE_TAG="2022-latest" 13 | else 14 | IMAGE_TAG="${DOCKER_MSSQL_TAG}" 15 | fi 16 | 17 | IMAGE_FULL_NAME="$IMAGE_NAME:$IMAGE_TAG" 18 | 19 | docker pull --platform=linux/amd64 "$IMAGE_FULL_NAME" 20 | 21 | # NB: to test on Apple Silicon computers you have to enable "Use Rosetta" in Docker settings: see https://github.com/microsoft/mssql-docker/issues/668#issuecomment-1436802153 22 | docker run --platform=linux/amd64 -d --name $CONTAINER_NAME \ 23 | -e ACCEPT_EULA="Y" \ 24 | -e SA_PASSWORD="$TEST_DB_PASSWORD" \ 25 | -p "$TEST_DB_PORT":1433 \ 26 | "$IMAGE_FULL_NAME" 27 | 28 | # Wait until database becomes online 29 | until docker logs --tail all ${CONTAINER_NAME} 2>&1 | grep -c "Service Broker manager has started." > /dev/null; do 30 | echo "Waiting database to become online..." 31 | sleep 5 32 | done 33 | 34 | echo "Database online" 35 | 36 | # Create test database 37 | docker exec -i "$CONTAINER_NAME" /opt/mssql-tools18/bin/sqlcmd \ 38 | -S localhost \ 39 | -N \ 40 | -C \ 41 | -U "$TEST_DB_USERNAME" \ 42 | -P "$TEST_DB_PASSWORD" \ 43 | -Q "CREATE DATABASE $TEST_DB_DATABASE" 44 | -------------------------------------------------------------------------------- /src/tests/integration/mssql/mssql.test.ts: -------------------------------------------------------------------------------- 1 | import { Sequelize } from 'sequelize-typescript'; 2 | import { QueryTypes } from 'sequelize'; 3 | import { ITestMetadata } from '../ITestMetadata'; 4 | import { TestRunner } from '../TestRunner'; 5 | import { 6 | DATA_TYPES_TABLE_NAME, 7 | DATA_TYPES_TABLE_DROP, 8 | DATA_TYPES_TABLE_CREATES, 9 | INDICES_TABLE_NAME, 10 | INDICES_TABLE_DROP, 11 | INDICES_TABLE_CREATES, 12 | AUTHORS_TABLE_NAME, 13 | AUTHORS_TABLE_DROP, 14 | AUTHORS_TABLE_CREATES, 15 | AUTHORS_TABLE_INSERTS, 16 | BOOKS_TABLE_NAME, 17 | BOOKS_TABLE_DROP, 18 | 
BOOKS_TABLE_CREATES, 19 | BOOKS_TABLE_INSERTS, 20 | AUTHORS_BOOKS_TABLE_NAME, 21 | AUTHORS_BOOKS_TABLE_DROP, 22 | AUTHORS_BOOKS_TABLE_CREATES, 23 | AUTHORS_BOOKS_TABLE_INSERTS, 24 | RACES_TABLE_NAME, 25 | RACES_TABLE_DROP, 26 | RACES_TABLE_CREATES, 27 | RACES_TABLE_INSERTS, 28 | UNITS_TABLE_NAME, 29 | UNITS_TABLE_DROP, 30 | UNITS_TABLE_CREATES, 31 | UNITS_TABLE_INSERTS, 32 | PERSON_TABLE_NAME, 33 | PERSON_TABLE_DROP, 34 | PERSON_TABLE_CREATES, 35 | PERSON_TABLE_INSERTS, 36 | PASSPORT_TABLE_NAME, 37 | PASSPORT_TABLE_DROP, 38 | PASSPORT_TABLE_CREATES, 39 | PASSPORT_TABLE_INSERTS, 40 | } from './queries'; 41 | 42 | interface INativeType { 43 | DATA_TYPE: string; 44 | data_type: string; 45 | } 46 | 47 | const testMetadata: ITestMetadata = { 48 | name: 'MSSQL', 49 | dialect: 'mssql', 50 | testTables: [ 51 | { 52 | name: DATA_TYPES_TABLE_NAME, 53 | createQueries: DATA_TYPES_TABLE_CREATES, 54 | dropQuery: DATA_TYPES_TABLE_DROP, 55 | }, 56 | { 57 | name: INDICES_TABLE_NAME, 58 | createQueries: INDICES_TABLE_CREATES, 59 | dropQuery: INDICES_TABLE_DROP, 60 | }, 61 | { 62 | name: AUTHORS_TABLE_NAME, 63 | createQueries: AUTHORS_TABLE_CREATES, 64 | dropQuery: AUTHORS_TABLE_DROP, 65 | insertQueries: AUTHORS_TABLE_INSERTS, 66 | }, 67 | { 68 | name: BOOKS_TABLE_NAME, 69 | createQueries: BOOKS_TABLE_CREATES, 70 | dropQuery: BOOKS_TABLE_DROP, 71 | insertQueries: BOOKS_TABLE_INSERTS, 72 | }, 73 | { 74 | name: AUTHORS_BOOKS_TABLE_NAME, 75 | createQueries: AUTHORS_BOOKS_TABLE_CREATES, 76 | dropQuery: AUTHORS_BOOKS_TABLE_DROP, 77 | insertQueries: AUTHORS_BOOKS_TABLE_INSERTS, 78 | }, 79 | { 80 | name: RACES_TABLE_NAME, 81 | createQueries: RACES_TABLE_CREATES, 82 | dropQuery: RACES_TABLE_DROP, 83 | insertQueries: RACES_TABLE_INSERTS, 84 | }, 85 | { 86 | name: UNITS_TABLE_NAME, 87 | createQueries: UNITS_TABLE_CREATES, 88 | dropQuery: UNITS_TABLE_DROP, 89 | insertQueries: UNITS_TABLE_INSERTS, 90 | }, 91 | { 92 | name: PERSON_TABLE_NAME, 93 | createQueries: PERSON_TABLE_CREATES, 94 | 
dropQuery: PERSON_TABLE_DROP, 95 | insertQueries: PERSON_TABLE_INSERTS, 96 | }, 97 | { 98 | name: PASSPORT_TABLE_NAME, 99 | createQueries: PASSPORT_TABLE_CREATES, 100 | dropQuery: PASSPORT_TABLE_DROP, 101 | insertQueries: PASSPORT_TABLE_INSERTS, 102 | }, 103 | ], 104 | filterTables: [ DATA_TYPES_TABLE_NAME ], 105 | filterSkipTables: [ INDICES_TABLE_NAME ], 106 | dataTypes: { 107 | dataTypesTable: DATA_TYPES_TABLE_NAME, 108 | async getColumnNativeDataType( 109 | connection: Sequelize, 110 | schema: string, 111 | table: string, 112 | column: string): Promise 113 | { 114 | const query = ` 115 | SELECT DATA_TYPE 116 | FROM information_schema.columns 117 | WHERE table_catalog=N'${schema}' AND table_name=N'${table}' AND column_name=N'${column}'; 118 | `; 119 | 120 | const res = await connection.query(query, { 121 | type: QueryTypes.SELECT, 122 | raw: true, 123 | }) as INativeType[]; 124 | 125 | return res[0].DATA_TYPE ?? res[0].data_type; 126 | }, 127 | testValues: [ 128 | ['int', 2147483647], 129 | ['整数', 2147483647], 130 | ['bigint', 9007199254740991], 131 | ['tinyint', 127], 132 | ['smallint', 32767], 133 | ['numeric', '99.999'], 134 | ['decimal', '99.999'], 135 | ['float', 15.23], 136 | ['real', 29.78], 137 | ['dec', '99.999'], 138 | ['money', 3500.25], 139 | ['char', 'A'], 140 | ['character', 'A'], 141 | ['nchar', 'A'], 142 | ['varchar', 'Mairubarelabarba'], 143 | ['nvarchar', 'inbarbaadunbarbaro'], 144 | ['text', 'quandoseiinveste'], 145 | ['ntext', 'dirabarbaro'], 146 | ['double', '99.999'], 147 | ['date', '2020-01-01'], 148 | // ['datetime', '2020-12-12 11:30:30.12345'], 149 | ['datetime2', new Date()], 150 | ['datetimeoffset', '2020-12-12 11:30:30.12345'], 151 | ['time', '23:59:59'], 152 | // ['timestamp', '2020-02-04 17:19:08.267'], 153 | // ['smalldatetime', ''], 154 | ['smallmoney', 3500.25], 155 | ['binary', Buffer.from('1 or 0')], 156 | ['bit', 1], 157 | ['uniqueidentifier', '0E984725-C51C-4BF4-9960-E1C80E27ABA0'], 158 | ['xml', 'it'], 159 | ['varbinary', 
Buffer.from('1 or 0')], 160 | ] 161 | }, 162 | associations: { 163 | leftTableOneToOne: PERSON_TABLE_NAME, 164 | rightTableOneToOne: PASSPORT_TABLE_NAME, 165 | leftTableOneToMany: RACES_TABLE_NAME, 166 | rightTableOneToMany: UNITS_TABLE_NAME, 167 | leftTableManyToMany: AUTHORS_TABLE_NAME, 168 | rightTableManyToMany: BOOKS_TABLE_NAME, 169 | } 170 | }; 171 | 172 | const testRunner = new TestRunner(testMetadata); 173 | testRunner.run(); 174 | -------------------------------------------------------------------------------- /src/tests/integration/mssql/queries.ts: -------------------------------------------------------------------------------- 1 | export const DATA_TYPES_TABLE_NAME = 'データ型'; // 'data types' in japanese, for testing non ASCII string on MSSQL 2 | export const DATA_TYPES_TABLE_DROP = `DROP TABLE IF EXISTS ${DATA_TYPES_TABLE_NAME}`; 3 | export const DATA_TYPES_TABLE_CREATES = [ 4 | ` 5 | CREATE TABLE ${DATA_TYPES_TABLE_NAME} 6 | ( 7 | id INT identity constraint data_types_pk primary key nonclustered, 8 | f_int INT, 9 | f_整数 INTEGER, 10 | f_bigint BIGINT, 11 | f_tinyint TINYINT, 12 | f_smallint SMALLINT, 13 | f_numeric NUMERIC(7, 2), 14 | f_decimal DECIMAL(5, 2), 15 | f_float FLOAT, 16 | f_real REAL, 17 | f_dec DEC(5, 2), 18 | f_money MONEY, 19 | f_char CHAR(1), 20 | f_character CHARACTER(1), 21 | f_nchar NCHAR, 22 | f_varchar VARCHAR(80), 23 | f_nvarchar NVARCHAR(80), 24 | f_text TEXT, 25 | f_ntext NTEXT, 26 | f_double DOUBLE PRECISION, 27 | f_date DATE, 28 | f_datetime DATETIME, 29 | f_datetime2 DATETIME2, 30 | f_datetimeoffset DATETIMEOFFSET, 31 | f_time TIME, 32 | f_smalldatetime SMALLDATETIME, 33 | f_smallmoney SMALLMONEY, 34 | f_binary BINARY(16), 35 | f_bit BIT, 36 | f_uniqueidentifier UNIQUEIDENTIFIER, 37 | f_xml XML, 38 | f_varbinary VARBINARY(16) 39 | ); 40 | `, 41 | ]; 42 | 43 | export const INDICES_TABLE_NAME = 'indices'; 44 | export const INDICES_TABLE_DROP = `DROP TABLE IF EXISTS ${INDICES_TABLE_NAME}`; 45 | export const INDICES_TABLE_CREATES = 
[ 46 | ` 47 | create table indices 48 | ( 49 | id INT identity constraint indices_pk primary key nonclustered, 50 | f_unique INT, 51 | f_multi_1 VARCHAR(80) not null, 52 | f_multi_2 INT, 53 | f_not_unique INT 54 | ); 55 | `, 56 | ` 57 | CREATE UNIQUE INDEX indices_f_unique_uindex 58 | ON indices (f_unique); 59 | `, 60 | ` 61 | CREATE UNIQUE INDEX indices_f_multi_1_f_multi_2_uindex 62 | ON indices (f_multi_1, f_multi_2); 63 | `, 64 | ` 65 | CREATE INDEX indices_f_not_unique_index 66 | ON indices (f_not_unique); 67 | `, 68 | ` 69 | CREATE UNIQUE INDEX indices_f_multi_1_uindex 70 | ON indices (f_multi_1); 71 | `, 72 | ]; 73 | 74 | export const AUTHORS_TABLE_NAME = 'authors'; 75 | export const AUTHORS_TABLE_DROP = `DROP TABLE IF EXISTS ${AUTHORS_TABLE_NAME}`; 76 | export const AUTHORS_TABLE_CREATES = [ 77 | ` 78 | CREATE TABLE ${AUTHORS_TABLE_NAME} 79 | ( 80 | author_id INT primary key, 81 | full_name VARCHAR(80) not null 82 | ); 83 | `, 84 | ]; 85 | export const AUTHORS_TABLE_INSERTS = [ 86 | `INSERT INTO ${AUTHORS_TABLE_NAME} VALUES (1, 'Isasc Asimov');`, 87 | `INSERT INTO ${AUTHORS_TABLE_NAME} VALUES (2, 'James Clavell');`, 88 | ]; 89 | 90 | export const BOOKS_TABLE_NAME = 'books'; 91 | export const BOOKS_TABLE_DROP = `DROP TABLE IF EXISTS ${BOOKS_TABLE_NAME}`; 92 | export const BOOKS_TABLE_CREATES = [ 93 | ` 94 | CREATE TABLE ${BOOKS_TABLE_NAME} 95 | ( 96 | book_id INT PRIMARY KEY, 97 | title VARCHAR(80) not null 98 | ); 99 | `, 100 | ]; 101 | export const BOOKS_TABLE_INSERTS = [ 102 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (1, 'Prelude to Foundation');`, 103 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (2, 'The End of Eternity');`, 104 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (3, 'Shogun');`, 105 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (4, 'Galactic Shogun');`, 106 | ]; 107 | 108 | export const AUTHORS_BOOKS_TABLE_NAME = 'authors_books'; 109 | export const AUTHORS_BOOKS_TABLE_DROP = `DROP TABLE IF EXISTS ${AUTHORS_BOOKS_TABLE_NAME}`; 110 | export const 
AUTHORS_BOOKS_TABLE_CREATES = [ 111 | ` 112 | CREATE TABLE ${AUTHORS_BOOKS_TABLE_NAME} 113 | ( 114 | author_id INT not null, 115 | book_id INT not null, 116 | PRIMARY KEY (author_id, book_id) 117 | ); 118 | `, 119 | ]; 120 | export const AUTHORS_BOOKS_TABLE_INSERTS = [ 121 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 1);`, 122 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 2);`, 123 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 4);`, 124 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (2, 3);`, 125 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (2, 4);`, 126 | ]; 127 | 128 | export const RACES_TABLE_NAME = 'races'; 129 | export const RACES_TABLE_DROP = `DROP TABLE IF EXISTS ${RACES_TABLE_NAME}`; 130 | export const RACES_TABLE_CREATES = [ 131 | ` 132 | CREATE TABLE ${RACES_TABLE_NAME} 133 | ( 134 | race_id INT PRIMARY KEY, 135 | race_name VARCHAR(80) NOT NULL 136 | ); 137 | `, 138 | ]; 139 | export const RACES_TABLE_INSERTS = [ 140 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(1, 'Orcs');`, 141 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(2, 'Humans');`, 142 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(3, 'Night Elves');`, 143 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(4, 'Undead');`, 144 | ]; 145 | 146 | export const UNITS_TABLE_NAME = 'units'; 147 | export const UNITS_TABLE_DROP = `DROP TABLE IF EXISTS ${UNITS_TABLE_NAME}`; 148 | export const UNITS_TABLE_CREATES = [ 149 | ` 150 | CREATE TABLE ${UNITS_TABLE_NAME} 151 | ( 152 | unit_id INT PRIMARY KEY, 153 | unit_name VARCHAR(80) NOT NULL, 154 | race_id INT NOT NULL 155 | ); 156 | `, 157 | ]; 158 | export const UNITS_TABLE_INSERTS = [ 159 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(1, 'Tauren Warrior', 1);`, 160 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(2, 'Kodo Beast', 1);`, 161 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(3, 'Rifleman', 2);`, 162 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(4, 'Dryad', 3);`, 163 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(5, 'Archer', 3);`, 164 | `INSERT INTO 
${UNITS_TABLE_NAME} VALUES(6, 'Ghoul', 4);`, 165 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(7, 'Frost Wyrm', 4);`, 166 | ]; 167 | 168 | export const PERSON_TABLE_NAME = 'person'; 169 | export const PERSON_TABLE_DROP = `DROP TABLE IF EXISTS ${PERSON_TABLE_NAME}`; 170 | export const PERSON_TABLE_CREATES = [ 171 | ` 172 | CREATE TABLE ${PERSON_TABLE_NAME} 173 | ( 174 | person_id INT PRIMARY KEY, 175 | name VARCHAR(80) NOT NULL, 176 | passport_id INT NOT NULL 177 | ); 178 | `, 179 | ]; 180 | export const PERSON_TABLE_INSERTS = [ 181 | `INSERT INTO ${PERSON_TABLE_NAME} VALUES(1, 'Arthas', 1);`, 182 | ]; 183 | 184 | export const PASSPORT_TABLE_NAME = 'passport'; 185 | export const PASSPORT_TABLE_DROP = `DROP TABLE IF EXISTS ${PASSPORT_TABLE_NAME}`; 186 | export const PASSPORT_TABLE_CREATES = [ 187 | ` 188 | CREATE TABLE ${PASSPORT_TABLE_NAME} 189 | ( 190 | passport_id INT PRIMARY KEY, 191 | code VARCHAR(80) NOT NULL 192 | ); 193 | `, 194 | ]; 195 | export const PASSPORT_TABLE_INSERTS = [ 196 | `INSERT INTO ${PASSPORT_TABLE_NAME} VALUES(1, 'Frostmourne');`, 197 | ]; 198 | -------------------------------------------------------------------------------- /src/tests/integration/mysql/docker-start-mysql.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | DIR="${BASH_SOURCE%/*}" 5 | if [[ ! -d "$DIR" ]]; then DIR="$PWD"; fi 6 | . 
"$DIR/../test-env.sh" 7 | 8 | IMAGE_NAME="mysql" 9 | CONTAINER_NAME="mysql" 10 | 11 | if [[ -z "${DOCKER_MYSQL_TAG}" ]]; then 12 | IMAGE_TAG="8" 13 | else 14 | IMAGE_TAG="${DOCKER_MYSQL_TAG}" 15 | fi 16 | 17 | IMAGE_FULL_NAME="$IMAGE_NAME:$IMAGE_TAG" 18 | 19 | docker pull "$IMAGE_FULL_NAME" 20 | 21 | docker run -d --name "$CONTAINER_NAME" \ 22 | -e MYSQL_DATABASE="$TEST_DB_DATABASE" \ 23 | -e MYSQL_USER="$TEST_DB_USERNAME" \ 24 | -e MYSQL_PASSWORD="$TEST_DB_PASSWORD" \ 25 | -e MYSQL_ROOT_PASSWORD="$TEST_DB_PASSWORD" \ 26 | -p "$TEST_DB_PORT":3306 \ 27 | "$IMAGE_FULL_NAME" 28 | 29 | # Wait until database becomes online 30 | until docker logs --tail all ${CONTAINER_NAME} 2>&1 | grep -c "MySQL init process done. Ready for start up" > /dev/null; do 31 | echo "Waiting database to become online..." 32 | sleep 5 33 | done 34 | 35 | echo "Database online" 36 | -------------------------------------------------------------------------------- /src/tests/integration/mysql/geometries.ts: -------------------------------------------------------------------------------- 1 | export const Point = JSON.parse(` 2 | {"type":"Point","coordinates":[1,1]} 3 | `); 4 | 5 | export const MultiPoint = JSON.parse(` 6 | {"type":"MultiPoint","coordinates":[[1,1],[2,2]]} 7 | `); 8 | 9 | export const LineString = JSON.parse(` 10 | {"type":"LineString","coordinates":[[0,0],[1,1],[2,2]]} 11 | `); 12 | 13 | export const MultiLineString = JSON.parse(` 14 | {"type":"MultiLineString","coordinates":[[[0,0],[1,1],[2,2]],[[0,0],[1,1],[2,2]]]} 15 | `); 16 | 17 | export const Polygon = JSON.parse(` 18 | {"type":"Polygon","coordinates":[[[0,0],[10,0],[10,10],[0,10],[0,0]],[[5,5],[7,5],[7,7],[5,7],[5,5]]]} 19 | `); 20 | 21 | export const MultiPolygon = JSON.parse(` 22 | {"type":"MultiPolygon","coordinates":[[[[0,0],[10,0],[10,10],[0,10],[0,0]],[[5,5],[7,5],[7,7],[5,7],[5,5]]],[[[0,0],[10,0],[10,10],[0,10],[0,0]],[[5,5],[7,5],[7,7],[5,7],[5,5]]]]} 23 | `); 24 | 25 | export const Geometry = JSON.parse(` 26 | 
{"type":"Point","coordinates":[1,1]} 27 | `); 28 | 29 | export const GeometryCollection = JSON.parse(` 30 | {"type":"GeometryCollection","geometries":[{"type":"Point","coordinates":[1,1]},{"type":"LineString","coordinates":[[0,0],[1,1],[2,2],[3,3],[4,4]]}]} 31 | `); 32 | 33 | 34 | -------------------------------------------------------------------------------- /src/tests/integration/mysql/mysql.test.ts: -------------------------------------------------------------------------------- 1 | import { Sequelize } from 'sequelize-typescript'; 2 | import { QueryTypes } from 'sequelize'; 3 | import { ITestMetadata } from '../ITestMetadata'; 4 | import { TestRunner } from '../TestRunner'; 5 | import * as geometries from './geometries'; 6 | import { 7 | DATA_TYPES_TABLE_NAME, 8 | DATA_TYPES_TABLE_DROP, 9 | DATA_TYPES_TABLE_CREATES, 10 | INDICES_TABLE_NAME, 11 | INDICES_TABLE_DROP, 12 | INDICES_TABLE_CREATES, 13 | AUTHORS_TABLE_NAME, 14 | AUTHORS_TABLE_DROP, 15 | AUTHORS_TABLE_CREATES, 16 | AUTHORS_TABLE_INSERTS, 17 | BOOKS_TABLE_NAME, 18 | BOOKS_TABLE_DROP, 19 | BOOKS_TABLE_CREATES, 20 | BOOKS_TABLE_INSERTS, 21 | AUTHORS_BOOKS_TABLE_NAME, 22 | AUTHORS_BOOKS_TABLE_DROP, 23 | AUTHORS_BOOKS_TABLE_CREATES, 24 | AUTHORS_BOOKS_TABLE_INSERTS, 25 | RACES_TABLE_NAME, 26 | RACES_TABLE_DROP, 27 | RACES_TABLE_CREATES, 28 | RACES_TABLE_INSERTS, 29 | UNITS_TABLE_NAME, 30 | UNITS_TABLE_DROP, 31 | UNITS_TABLE_CREATES, 32 | UNITS_TABLE_INSERTS, 33 | PERSON_TABLE_NAME, 34 | PERSON_TABLE_DROP, 35 | PERSON_TABLE_CREATES, 36 | PERSON_TABLE_INSERTS, 37 | PASSPORT_TABLE_NAME, 38 | PASSPORT_TABLE_DROP, 39 | PASSPORT_TABLE_CREATES, 40 | PASSPORT_TABLE_INSERTS, 41 | AUTHORS_VIEW_NAME, 42 | AUTHORS_VIEW_DROP, 43 | AUTHORS_VIEW_CREATES, 44 | } from './queries'; 45 | 46 | interface INativeType { 47 | DATA_TYPE: string; 48 | data_type: string; 49 | } 50 | 51 | const testMetadata: ITestMetadata = { 52 | name: 'MySQL', 53 | dialect: 'mysql', 54 | testTables: [ 55 | { 56 | name: DATA_TYPES_TABLE_NAME, 57 | 
createQueries: DATA_TYPES_TABLE_CREATES, 58 | dropQuery: DATA_TYPES_TABLE_DROP, 59 | }, 60 | { 61 | name: INDICES_TABLE_NAME, 62 | createQueries: INDICES_TABLE_CREATES, 63 | dropQuery: INDICES_TABLE_DROP, 64 | }, 65 | { 66 | name: AUTHORS_TABLE_NAME, 67 | createQueries: AUTHORS_TABLE_CREATES, 68 | dropQuery: AUTHORS_TABLE_DROP, 69 | insertQueries: AUTHORS_TABLE_INSERTS, 70 | }, 71 | { 72 | name: BOOKS_TABLE_NAME, 73 | createQueries: BOOKS_TABLE_CREATES, 74 | dropQuery: BOOKS_TABLE_DROP, 75 | insertQueries: BOOKS_TABLE_INSERTS, 76 | }, 77 | { 78 | name: AUTHORS_BOOKS_TABLE_NAME, 79 | createQueries: AUTHORS_BOOKS_TABLE_CREATES, 80 | dropQuery: AUTHORS_BOOKS_TABLE_DROP, 81 | insertQueries: AUTHORS_BOOKS_TABLE_INSERTS, 82 | }, 83 | { 84 | name: RACES_TABLE_NAME, 85 | createQueries: RACES_TABLE_CREATES, 86 | dropQuery: RACES_TABLE_DROP, 87 | insertQueries: RACES_TABLE_INSERTS, 88 | }, 89 | { 90 | name: UNITS_TABLE_NAME, 91 | createQueries: UNITS_TABLE_CREATES, 92 | dropQuery: UNITS_TABLE_DROP, 93 | insertQueries: UNITS_TABLE_INSERTS, 94 | }, 95 | { 96 | name: PERSON_TABLE_NAME, 97 | createQueries: PERSON_TABLE_CREATES, 98 | dropQuery: PERSON_TABLE_DROP, 99 | insertQueries: PERSON_TABLE_INSERTS, 100 | }, 101 | { 102 | name: PASSPORT_TABLE_NAME, 103 | createQueries: PASSPORT_TABLE_CREATES, 104 | dropQuery: PASSPORT_TABLE_DROP, 105 | insertQueries: PASSPORT_TABLE_INSERTS, 106 | }, 107 | ], 108 | testViews: [ 109 | { 110 | name: AUTHORS_VIEW_NAME, 111 | createQueries: AUTHORS_VIEW_CREATES, 112 | dropQuery: AUTHORS_VIEW_DROP, 113 | } 114 | ], 115 | filterTables: [ DATA_TYPES_TABLE_NAME ], 116 | filterSkipTables: [ INDICES_TABLE_NAME ], 117 | dataTypes: { 118 | dataTypesTable: DATA_TYPES_TABLE_NAME, 119 | async getColumnNativeDataType( 120 | connection: Sequelize, 121 | schema: string, 122 | table: string, 123 | column: string): Promise 124 | { 125 | const query = ` 126 | SELECT DATA_TYPE 127 | FROM information_schema.columns 128 | WHERE table_schema='${schema}' AND 
table_name='${table}' AND column_name='${column}'; 129 | `; 130 | 131 | const res = await connection.query(query, { 132 | type: QueryTypes.SELECT, 133 | raw: true, 134 | }) as INativeType[]; 135 | 136 | return res[0].DATA_TYPE ?? res[0].data_type; 137 | }, 138 | testValues: [ 139 | ['bigint', 100000000000000000], 140 | ['smallint', 32767], 141 | ['mediumint', 8388607], 142 | ['tinyint', 127], 143 | ['decimal', '99.999'], 144 | ['float', 66.78], 145 | ['double', 11.2345], 146 | ['int', 2147483647], 147 | ['varchar', 'Hello world'], 148 | ['char', 'a'], 149 | ['tinytext', 'xyz'], 150 | ['mediumtext', 'Voodoo Lady'], 151 | ['longtext', 'Supercalifragilisticexpialidocious'], 152 | ['text', 'Access denied'], 153 | ['date', '2020-01-01'], 154 | ['time', '23:59:59'], 155 | ['datetime', new Date()], 156 | ['timestamp', new Date()], 157 | ['year', new Date().getFullYear()], 158 | ['enum', 'BB'], 159 | ['set', 'X'], 160 | ['bit', 127], 161 | ['binary', Buffer.from('A')], 162 | ['blob', Buffer.from('Not authorized')], 163 | ['tinyblob', Buffer.from('xyz')], 164 | ['mediumblob', Buffer.from('Voodoo Lady')], 165 | ['longblob', Buffer.from('Supercalifragilisticexpialidocious')], 166 | ['point', geometries.Point], 167 | ['multipoint', geometries.MultiPoint], 168 | ['linestring', geometries.LineString], 169 | ['multilinestring', geometries.MultiLineString], 170 | ['polygon', geometries.Polygon], 171 | ['multipolygon', geometries.MultiPolygon], 172 | ['geometry', geometries.Geometry], 173 | // ['geometrycollection', geometries.GeometryCollection], 174 | ['json', JSON.parse('{"key1": "value1", "key2": "value2"}')], 175 | ] 176 | }, 177 | associations: { 178 | leftTableOneToOne: PERSON_TABLE_NAME, 179 | rightTableOneToOne: PASSPORT_TABLE_NAME, 180 | leftTableOneToMany: RACES_TABLE_NAME, 181 | rightTableOneToMany: UNITS_TABLE_NAME, 182 | leftTableManyToMany: AUTHORS_TABLE_NAME, 183 | rightTableManyToMany: BOOKS_TABLE_NAME, 184 | }, 185 | }; 186 | 187 | const testRunner = new 
TestRunner(testMetadata); 188 | testRunner.run(); 189 | -------------------------------------------------------------------------------- /src/tests/integration/mysql/queries.ts: -------------------------------------------------------------------------------- 1 | export const DATA_TYPES_TABLE_NAME = 'data_types'; 2 | export const DATA_TYPES_TABLE_DROP = `DROP TABLE IF EXISTS ${DATA_TYPES_TABLE_NAME} CASCADE`; 3 | export const DATA_TYPES_TABLE_CREATES = [ 4 | ` 5 | CREATE TABLE ${DATA_TYPES_TABLE_NAME} 6 | ( 7 | id INT AUTO_INCREMENT PRIMARY KEY, 8 | f_bit BIT(7) null, 9 | f_bigint BIGINT null, 10 | f_smallint SMALLINT null, 11 | f_mediumint MEDIUMINT null, 12 | f_tinyint TINYINT null, 13 | f_decimal DECIMAL(7, 3) null, 14 | f_float FLOAT(5, 3) null, 15 | f_double DOUBLE(7, 4) null, 16 | f_int INT null, 17 | f_varchar VARCHAR(80) null, 18 | f_char CHAR(10) null, 19 | f_tinytext TINYTEXT null, 20 | f_mediumtext MEDIUMTEXT null, 21 | f_longtext LONGTEXT null, 22 | f_text TEXT null, 23 | f_date DATE null, 24 | f_time TIME null, 25 | f_datetime DATETIME(6) null, 26 | f_timestamp TIMESTAMP(3) null, 27 | f_year YEAR null, 28 | f_enum ENUM ('AA', 'BB') null, 29 | f_set SET ('X', 'Y') null, 30 | f_binary BINARY null, 31 | f_blob BLOB null, 32 | f_tinyblob TINYBLOB null, 33 | f_mediumblob MEDIUMBLOB null, 34 | f_longblob LONGBLOB null, 35 | f_point POINT null, 36 | f_multipoint MULTIPOINT null, 37 | f_linestring LINESTRING null, 38 | f_multilinestring MULTILINESTRING null, 39 | f_polygon POLYGON null, 40 | f_multipolygon MULTIPOLYGON null, 41 | f_geometry GEOMETRY null, 42 | f_json JSON null 43 | ) CHARSET = 'latin1'; 44 | `, 45 | ]; 46 | 47 | export const INDICES_TABLE_NAME = 'indices'; 48 | export const INDICES_TABLE_DROP = `DROP TABLE IF EXISTS ${INDICES_TABLE_NAME} CASCADE`; 49 | export const INDICES_TABLE_CREATES = [ 50 | ` 51 | CREATE TABLE ${INDICES_TABLE_NAME} 52 | ( 53 | id int auto_increment primary key, 54 | f_unique bigint null, 55 | f_multi_1 int not null, 56 | 
f_multi_2 varchar(80) null, 57 | CONSTRAINT indices_f_multi_1_uindex UNIQUE (f_multi_1), 58 | CONSTRAINT indices_f_unique_uindex UNIQUE (f_unique) 59 | ) CHARSET = 'latin1'; 60 | `, 61 | ` 62 | CREATE INDEX indices_f_multi_1_f_multi_2_index 63 | ON ${INDICES_TABLE_NAME} (f_multi_1, f_multi_2); 64 | `, 65 | ]; 66 | 67 | export const AUTHORS_TABLE_NAME = 'authors'; 68 | export const AUTHORS_TABLE_DROP = `DROP TABLE IF EXISTS ${AUTHORS_TABLE_NAME} CASCADE`; 69 | export const AUTHORS_TABLE_CREATES = [ 70 | ` 71 | CREATE TABLE ${AUTHORS_TABLE_NAME} 72 | ( 73 | author_id INT primary key, 74 | full_name VARCHAR(80) not null 75 | ); 76 | `, 77 | ]; 78 | export const AUTHORS_TABLE_INSERTS = [ 79 | `INSERT INTO ${AUTHORS_TABLE_NAME} VALUES (1, 'Isasc Asimov');`, 80 | `INSERT INTO ${AUTHORS_TABLE_NAME} VALUES (2, 'James Clavell');`, 81 | ]; 82 | 83 | export const BOOKS_TABLE_NAME = 'books'; 84 | export const BOOKS_TABLE_DROP = `DROP TABLE IF EXISTS ${BOOKS_TABLE_NAME} CASCADE`; 85 | export const BOOKS_TABLE_CREATES = [ 86 | ` 87 | CREATE TABLE ${BOOKS_TABLE_NAME} 88 | ( 89 | book_id INT PRIMARY KEY, 90 | title VARCHAR(80) not null 91 | ); 92 | `, 93 | ]; 94 | export const BOOKS_TABLE_INSERTS = [ 95 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (1, 'Prelude to Foundation');`, 96 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (2, 'The End of Eternity');`, 97 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (3, 'Shogun');`, 98 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (4, 'Galactic Shogun');`, 99 | ]; 100 | 101 | export const AUTHORS_BOOKS_TABLE_NAME = 'authors_books'; 102 | export const AUTHORS_BOOKS_TABLE_DROP = `DROP TABLE IF EXISTS ${AUTHORS_BOOKS_TABLE_NAME} CASCADE`; 103 | export const AUTHORS_BOOKS_TABLE_CREATES = [ 104 | ` 105 | CREATE TABLE ${AUTHORS_BOOKS_TABLE_NAME} 106 | ( 107 | author_id INT not null, 108 | book_id INT not null, 109 | PRIMARY KEY (author_id, book_id) 110 | ); 111 | `, 112 | ]; 113 | export const AUTHORS_BOOKS_TABLE_INSERTS = [ 114 | `INSERT INTO 
${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 1);`, 115 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 2);`, 116 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 4);`, 117 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (2, 3);`, 118 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (2, 4);`, 119 | ]; 120 | 121 | export const RACES_TABLE_NAME = 'races'; 122 | export const RACES_TABLE_DROP = `DROP TABLE IF EXISTS ${RACES_TABLE_NAME} CASCADE`; 123 | export const RACES_TABLE_CREATES = [ 124 | ` 125 | CREATE TABLE ${RACES_TABLE_NAME} 126 | ( 127 | race_id INT PRIMARY KEY, 128 | race_name VARCHAR(80) NOT NULL 129 | ); 130 | `, 131 | ]; 132 | export const RACES_TABLE_INSERTS = [ 133 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(1, 'Orcs');`, 134 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(2, 'Humans');`, 135 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(3, 'Night Elves');`, 136 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(4, 'Undead');`, 137 | ]; 138 | 139 | export const UNITS_TABLE_NAME = 'units'; 140 | export const UNITS_TABLE_DROP = `DROP TABLE IF EXISTS ${UNITS_TABLE_NAME} CASCADE`; 141 | export const UNITS_TABLE_CREATES = [ 142 | ` 143 | CREATE TABLE ${UNITS_TABLE_NAME} 144 | ( 145 | unit_id INT PRIMARY KEY, 146 | unit_name VARCHAR(80) NOT NULL, 147 | race_id INT NOT NULL 148 | ); 149 | `, 150 | ]; 151 | export const UNITS_TABLE_INSERTS = [ 152 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(1, 'Tauren Warrior', 1);`, 153 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(2, 'Kodo Beast', 1);`, 154 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(3, 'Rifleman', 2);`, 155 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(4, 'Dryad', 3);`, 156 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(5, 'Archer', 3);`, 157 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(6, 'Ghoul', 4);`, 158 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(7, 'Frost Wyrm', 4);`, 159 | ]; 160 | 161 | export const PERSON_TABLE_NAME = 'person'; 162 | export const PERSON_TABLE_DROP = `DROP TABLE IF EXISTS ${PERSON_TABLE_NAME} CASCADE`; 163 | export 
const PERSON_TABLE_CREATES = [ 164 | ` 165 | CREATE TABLE ${PERSON_TABLE_NAME} 166 | ( 167 | person_id INT PRIMARY KEY, 168 | name VARCHAR(80) NOT NULL, 169 | passport_id INT NOT NULL 170 | ); 171 | `, 172 | ]; 173 | export const PERSON_TABLE_INSERTS = [ 174 | `INSERT INTO ${PERSON_TABLE_NAME} VALUES(1, 'Arthas', 1);`, 175 | ]; 176 | 177 | export const PASSPORT_TABLE_NAME = 'passport'; 178 | export const PASSPORT_TABLE_DROP = `DROP TABLE IF EXISTS ${PASSPORT_TABLE_NAME} CASCADE`; 179 | export const PASSPORT_TABLE_CREATES = [ 180 | ` 181 | CREATE TABLE ${PASSPORT_TABLE_NAME} 182 | ( 183 | passport_id INT PRIMARY KEY, 184 | code VARCHAR(80) NOT NULL 185 | ); 186 | `, 187 | ]; 188 | export const PASSPORT_TABLE_INSERTS = [ 189 | `INSERT INTO ${PASSPORT_TABLE_NAME} VALUES(1, 'Frostmourne');`, 190 | ]; 191 | 192 | export const AUTHORS_VIEW_NAME = 'authors_view'; 193 | export const AUTHORS_VIEW_DROP = `DROP VIEW IF EXISTS ${AUTHORS_VIEW_NAME}`; 194 | export const AUTHORS_VIEW_CREATES = [ 195 | `CREATE OR REPLACE VIEW ${AUTHORS_VIEW_NAME} AS SELECT full_name FROM ${AUTHORS_TABLE_NAME}`, 196 | ]; 197 | -------------------------------------------------------------------------------- /src/tests/integration/postgres/docker-start-postgres.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | DIR="${BASH_SOURCE%/*}" 5 | if [[ ! -d "$DIR" ]]; then DIR="$PWD"; fi 6 | . 
"$DIR/../test-env.sh" 7 | 8 | IMAGE_NAME="postgres" 9 | CONTAINER_NAME="postgres" 10 | 11 | if [[ -z "${DOCKER_POSTGRES_TAG}" ]]; then 12 | IMAGE_TAG="16" 13 | else 14 | IMAGE_TAG="${DOCKER_POSTGRES_TAG}" 15 | fi 16 | 17 | IMAGE_FULL_NAME="$IMAGE_NAME:$IMAGE_TAG" 18 | 19 | docker pull "$IMAGE_FULL_NAME" 20 | 21 | docker run -d --name "$CONTAINER_NAME" \ 22 | -e POSTGRES_DB="$TEST_DB_DATABASE" \ 23 | -e POSTGRES_USER="$TEST_DB_USERNAME" \ 24 | -e POSTGRES_PASSWORD="$TEST_DB_PASSWORD" \ 25 | -p "$TEST_DB_PORT":5432 \ 26 | "$IMAGE_FULL_NAME" 27 | 28 | # Wait until database becomes online 29 | until docker logs --tail all ${CONTAINER_NAME} 2>&1 | grep -c "PostgreSQL init process complete; ready for start up." > /dev/null; do 30 | echo "Waiting database to become online..." 31 | sleep 5 32 | done 33 | 34 | echo "Database online" 35 | -------------------------------------------------------------------------------- /src/tests/integration/postgres/postgres.test.ts: -------------------------------------------------------------------------------- 1 | import { ITestMetadata } from '../ITestMetadata'; 2 | import { Sequelize } from 'sequelize-typescript'; 3 | import { TestRunner } from '../TestRunner'; 4 | import { QueryTypes } from 'sequelize'; 5 | import { 6 | SCHEMA_DROP, 7 | SCHEMA_CREATE, 8 | DATA_TYPES_TABLE_NAME, 9 | DATA_TYPES_TABLE_CREATES, 10 | DATA_TYPES_TABLE_DROP, 11 | INDICES_TABLE_NAME, 12 | INDICES_TABLE_CREATES, 13 | INDICES_TABLE_DROP, 14 | AUTHORS_TABLE_NAME, 15 | AUTHORS_TABLE_DROP, 16 | AUTHORS_TABLE_CREATES, 17 | AUTHORS_TABLE_INSERTS, 18 | BOOKS_TABLE_NAME, 19 | BOOKS_TABLE_DROP, 20 | BOOKS_TABLE_CREATES, 21 | BOOKS_TABLE_INSERTS, 22 | AUTHORS_BOOKS_TABLE_NAME, 23 | AUTHORS_BOOKS_TABLE_DROP, 24 | AUTHORS_BOOKS_TABLE_CREATES, 25 | AUTHORS_BOOKS_TABLE_INSERTS, 26 | RACES_TABLE_NAME, 27 | RACES_TABLE_DROP, 28 | RACES_TABLE_CREATES, 29 | RACES_TABLE_INSERTS, 30 | UNITS_TABLE_NAME, 31 | UNITS_TABLE_DROP, 32 | UNITS_TABLE_CREATES, 33 | UNITS_TABLE_INSERTS, 34 | 
PERSON_TABLE_NAME, 35 | PERSON_TABLE_DROP, 36 | PERSON_TABLE_CREATES, 37 | PERSON_TABLE_INSERTS, 38 | PASSPORT_TABLE_NAME, 39 | PASSPORT_TABLE_DROP, 40 | PASSPORT_TABLE_CREATES, 41 | PASSPORT_TABLE_INSERTS, 42 | } from "./queries"; 43 | 44 | interface INativeType { 45 | udt_name: string; 46 | UDT_NAME: string; 47 | } 48 | 49 | const testMetadata: ITestMetadata = { 50 | name: 'Postgres', 51 | dialect: 'postgres', 52 | schema: { 53 | name: process.env.TEST_DB_SCHEMA ?? 'public', 54 | createQuery: SCHEMA_CREATE, 55 | dropQuery: SCHEMA_DROP, 56 | }, 57 | testTables: [ 58 | { 59 | name: DATA_TYPES_TABLE_NAME, 60 | createQueries: DATA_TYPES_TABLE_CREATES, 61 | dropQuery: DATA_TYPES_TABLE_DROP, 62 | }, 63 | { 64 | name: INDICES_TABLE_NAME, 65 | createQueries: INDICES_TABLE_CREATES, 66 | dropQuery: INDICES_TABLE_DROP, 67 | }, 68 | { 69 | name: AUTHORS_TABLE_NAME, 70 | createQueries: AUTHORS_TABLE_CREATES, 71 | dropQuery: AUTHORS_TABLE_DROP, 72 | insertQueries: AUTHORS_TABLE_INSERTS, 73 | }, 74 | { 75 | name: BOOKS_TABLE_NAME, 76 | createQueries: BOOKS_TABLE_CREATES, 77 | dropQuery: BOOKS_TABLE_DROP, 78 | insertQueries: BOOKS_TABLE_INSERTS, 79 | }, 80 | { 81 | name: AUTHORS_BOOKS_TABLE_NAME, 82 | createQueries: AUTHORS_BOOKS_TABLE_CREATES, 83 | dropQuery: AUTHORS_BOOKS_TABLE_DROP, 84 | insertQueries: AUTHORS_BOOKS_TABLE_INSERTS, 85 | }, 86 | { 87 | name: RACES_TABLE_NAME, 88 | createQueries: RACES_TABLE_CREATES, 89 | dropQuery: RACES_TABLE_DROP, 90 | insertQueries: RACES_TABLE_INSERTS, 91 | }, 92 | { 93 | name: UNITS_TABLE_NAME, 94 | createQueries: UNITS_TABLE_CREATES, 95 | dropQuery: UNITS_TABLE_DROP, 96 | insertQueries: UNITS_TABLE_INSERTS, 97 | }, 98 | { 99 | name: PERSON_TABLE_NAME, 100 | createQueries: PERSON_TABLE_CREATES, 101 | dropQuery: PERSON_TABLE_DROP, 102 | insertQueries: PERSON_TABLE_INSERTS, 103 | }, 104 | { 105 | name: PASSPORT_TABLE_NAME, 106 | createQueries: PASSPORT_TABLE_CREATES, 107 | dropQuery: PASSPORT_TABLE_DROP, 108 | insertQueries: 
PASSPORT_TABLE_INSERTS, 109 | }, 110 | ], 111 | filterTables: [ DATA_TYPES_TABLE_NAME ], 112 | filterSkipTables: [ INDICES_TABLE_NAME ], 113 | dataTypes: { 114 | dataTypesTable: DATA_TYPES_TABLE_NAME, 115 | async getColumnNativeDataType( 116 | connection: Sequelize, 117 | schema: string, 118 | table: string, 119 | column: string): Promise<string> 120 | { 121 | const query = ` 122 | SELECT udt_name 123 | FROM information_schema.columns 124 | WHERE table_schema='${schema}' AND table_name='${table}' AND column_name='${column}'; 125 | `; 126 | 127 | const res = await connection.query(query, { 128 | type: QueryTypes.SELECT, 129 | raw: true, 130 | }) as INativeType[]; 131 | 132 | return res[0].udt_name ?? res[0].UDT_NAME; 133 | }, 134 | testValues: [ 135 | ['smallint', 32767], 136 | ['integer', 2147483647], 137 | ['bigint', '100000000000000000'], 138 | ['decimal', '99.999'], 139 | ['numeric', '66.78'], 140 | ['real', 66.66], 141 | ['double', 11.2345], 142 | ['money', '$100,000.00'], 143 | ['varchar', 'Hello world'], 144 | ['char', 'A'], 145 | ['character', 'AB'], 146 | ['text', 'xYz'], 147 | ['cidr', '10.0.0.0/16'], 148 | ['inet', '192.168.100.128/25'], 149 | ['macaddr', '08:00:2b:01:02:03'], 150 | ['macaddr8', '08:00:2b:01:02:03:04:05'], 151 | ['bit', '1'], 152 | ['varbit', '101'], 153 | ['uuid', 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'], 154 | ['xml', `<foo>bar</foo>`], 155 | ['bytea', Buffer.from('A')], 156 | ['timestamp', new Date()], 157 | ['timestamptz', new Date()], 158 | ['date', '2020-01-01'], 159 | ['time', '23:59:59'], 160 | ['timetz', '23:59:59+00'], 161 | ['boolean', true], 162 | // ['point', ''], // need PostGIS extension installed on Postgres 163 | // ['line', ''], // need PostGIS extension installed on Postgres 164 | // ['lseg', ''], // need PostGIS extension installed on Postgres 165 | // ['box', ''], // need PostGIS extension installed on Postgres 166 | // ['path', ''], // need PostGIS extension installed on Postgres 167 | // ['polygon', ''], // need PostGIS extension 
installed on Postgres 168 | // ['circle', ''], // need PostGIS extension installed on Postgres 169 | ['json', JSON.parse('{"key1": "value1", "key2": "value2"}')], 170 | ['jsonb', JSON.parse('{"key1": "value1", "key2": "value2"}')], 171 | ], 172 | }, 173 | associations: { 174 | leftTableOneToOne: PERSON_TABLE_NAME, 175 | rightTableOneToOne: PASSPORT_TABLE_NAME, 176 | leftTableOneToMany: RACES_TABLE_NAME, 177 | rightTableOneToMany: UNITS_TABLE_NAME, 178 | leftTableManyToMany: AUTHORS_TABLE_NAME, 179 | rightTableManyToMany: BOOKS_TABLE_NAME, 180 | }, 181 | }; 182 | 183 | const testRunner = new TestRunner(testMetadata); 184 | testRunner.run(); 185 | -------------------------------------------------------------------------------- /src/tests/integration/postgres/queries.ts: -------------------------------------------------------------------------------- 1 | export const SCHEMA_NAME = process.env.TEST_DB_SCHEMA || 'public'; 2 | export const SCHEMA_DROP = `DROP SCHEMA IF EXISTS ${SCHEMA_NAME} CASCADE`; 3 | export const SCHEMA_CREATE = `CREATE SCHEMA IF NOT EXISTS ${SCHEMA_NAME}`; 4 | 5 | export const DATA_TYPES_TABLE_NAME = 'data_types'; 6 | export const DATA_TYPES_TABLE_DROP = `DROP TABLE IF EXISTS ${SCHEMA_NAME}.${DATA_TYPES_TABLE_NAME} CASCADE`; 7 | export const DATA_TYPES_TABLE_CREATES = [ 8 | ` 9 | CREATE TABLE ${SCHEMA_NAME}.${DATA_TYPES_TABLE_NAME} 10 | ( 11 | id serial not null constraint data_types_pk primary key, 12 | f_smallint smallint, 13 | f_integer integer, 14 | f_bigint bigint, 15 | f_decimal numeric(7, 3), 16 | f_numeric numeric(5, 2) DEFAULT 9.99, 17 | f_real real, 18 | f_double double precision, 19 | -- f_smallserial smallserial not null, 20 | -- f_serial serial not null, 21 | -- f_bigserial bigserial not null, 22 | f_money money, 23 | f_varchar varchar(80) DEFAULT 'Morpheus', 24 | f_char char(1), 25 | f_character char(2), 26 | f_text text, 27 | f_cidr cidr, 28 | f_inet inet, 29 | f_macaddr macaddr, 30 | f_macaddr8 macaddr8, 31 | f_bit bit, 32 | f_varbit 
bit varying, 33 | f_uuid uuid, 34 | f_xml xml, 35 | f_bytea bytea, 36 | f_timestamp timestamp(6), 37 | f_timestamptz timestamptz, 38 | f_date date, 39 | f_time time, 40 | f_timetz timetz, 41 | f_boolean boolean, 42 | f_point point, 43 | f_line line, 44 | f_lseg lseg, 45 | f_box box, 46 | f_path path, 47 | f_polygon polygon, 48 | f_circle circle, 49 | f_json json, 50 | f_jsonb jsonb 51 | ) 52 | `, 53 | ]; 54 | 55 | export const INDICES_TABLE_NAME = 'indices'; 56 | export const INDICES_TABLE_DROP = `DROP TABLE IF EXISTS ${SCHEMA_NAME}.${INDICES_TABLE_NAME} CASCADE`; 57 | export const INDICES_TABLE_CREATES = [ 58 | ` 59 | CREATE TABLE ${SCHEMA_NAME}.${INDICES_TABLE_NAME} 60 | ( 61 | id SERIAL CONSTRAINT indices_pk PRIMARY KEY, 62 | f_unique CHAR(1), 63 | f_multi_1 VARCHAR(3), 64 | f_multi_2 INT, 65 | f_not_unique CHAR(2) 66 | ); 67 | `, 68 | ` 69 | CREATE UNIQUE INDEX indices_f_multi_1_f_multi_2_uindex 70 | ON ${SCHEMA_NAME}.${INDICES_TABLE_NAME} (f_multi_1, f_multi_2); 71 | `, 72 | ` 73 | CREATE UNIQUE INDEX indices_f_multi_1_uindex 74 | ON ${SCHEMA_NAME}.${INDICES_TABLE_NAME} (f_multi_1); 75 | `, 76 | ` 77 | CREATE INDEX indices_f_not_unique_index 78 | ON ${SCHEMA_NAME}.${INDICES_TABLE_NAME} (f_not_unique DESC); 79 | `, 80 | ` 81 | CREATE UNIQUE INDEX indices_f_unique_uindex 82 | ON ${SCHEMA_NAME}.${INDICES_TABLE_NAME} (f_unique); 83 | `, 84 | ]; 85 | 86 | export const AUTHORS_TABLE_NAME = 'authors'; 87 | export const AUTHORS_TABLE_DROP = `DROP TABLE IF EXISTS ${AUTHORS_TABLE_NAME} CASCADE`; 88 | export const AUTHORS_TABLE_CREATES = [ 89 | ` 90 | CREATE TABLE ${SCHEMA_NAME}.${AUTHORS_TABLE_NAME} 91 | ( 92 | author_id INT primary key, 93 | full_name VARCHAR(80) not null 94 | ); 95 | `, 96 | ]; 97 | export const AUTHORS_TABLE_INSERTS = [ 98 | `INSERT INTO ${SCHEMA_NAME}.${AUTHORS_TABLE_NAME} VALUES (1, 'Isasc Asimov');`, 99 | `INSERT INTO ${SCHEMA_NAME}.${AUTHORS_TABLE_NAME} VALUES (2, 'James Clavell');`, 100 | ]; 101 | 102 | export const BOOKS_TABLE_NAME = 'books'; 
103 | export const BOOKS_TABLE_DROP = `DROP TABLE IF EXISTS ${BOOKS_TABLE_NAME} CASCADE`; 104 | export const BOOKS_TABLE_CREATES = [ 105 | ` 106 | CREATE TABLE ${SCHEMA_NAME}.${BOOKS_TABLE_NAME} 107 | ( 108 | book_id INT PRIMARY KEY, 109 | title VARCHAR(80) not null 110 | ); 111 | `, 112 | ]; 113 | export const BOOKS_TABLE_INSERTS = [ 114 | `INSERT INTO ${SCHEMA_NAME}.${BOOKS_TABLE_NAME} VALUES (1, 'Prelude to Foundation');`, 115 | `INSERT INTO ${SCHEMA_NAME}.${BOOKS_TABLE_NAME} VALUES (2, 'The End of Eternity');`, 116 | `INSERT INTO ${SCHEMA_NAME}.${BOOKS_TABLE_NAME} VALUES (3, 'Shogun');`, 117 | `INSERT INTO ${SCHEMA_NAME}.${BOOKS_TABLE_NAME} VALUES (4, 'Galactic Shogun');`, 118 | ]; 119 | 120 | export const AUTHORS_BOOKS_TABLE_NAME = 'authors_books'; 121 | export const AUTHORS_BOOKS_TABLE_DROP = `DROP TABLE IF EXISTS ${AUTHORS_BOOKS_TABLE_NAME} CASCADE`; 122 | export const AUTHORS_BOOKS_TABLE_CREATES = [ 123 | ` 124 | CREATE TABLE ${SCHEMA_NAME}.${AUTHORS_BOOKS_TABLE_NAME} 125 | ( 126 | author_id INT not null, 127 | book_id INT not null, 128 | PRIMARY KEY (author_id, book_id) 129 | ); 130 | `, 131 | ]; 132 | export const AUTHORS_BOOKS_TABLE_INSERTS = [ 133 | `INSERT INTO ${SCHEMA_NAME}.${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 1);`, 134 | `INSERT INTO ${SCHEMA_NAME}.${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 2);`, 135 | `INSERT INTO ${SCHEMA_NAME}.${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 4);`, 136 | `INSERT INTO ${SCHEMA_NAME}.${AUTHORS_BOOKS_TABLE_NAME} VALUES (2, 3);`, 137 | `INSERT INTO ${SCHEMA_NAME}.${AUTHORS_BOOKS_TABLE_NAME} VALUES (2, 4);`, 138 | ]; 139 | 140 | export const RACES_TABLE_NAME = 'races'; 141 | export const RACES_TABLE_DROP = `DROP TABLE IF EXISTS ${RACES_TABLE_NAME} CASCADE`; 142 | export const RACES_TABLE_CREATES = [ 143 | ` 144 | CREATE TABLE ${SCHEMA_NAME}.${RACES_TABLE_NAME} 145 | ( 146 | race_id INT PRIMARY KEY, 147 | race_name VARCHAR(80) NOT NULL 148 | ); 149 | `, 150 | ]; 151 | export const RACES_TABLE_INSERTS = [ 152 | `INSERT INTO 
${SCHEMA_NAME}.${RACES_TABLE_NAME} VALUES(1, 'Orcs');`, 153 | `INSERT INTO ${SCHEMA_NAME}.${RACES_TABLE_NAME} VALUES(2, 'Humans');`, 154 | `INSERT INTO ${SCHEMA_NAME}.${RACES_TABLE_NAME} VALUES(3, 'Night Elves');`, 155 | `INSERT INTO ${SCHEMA_NAME}.${RACES_TABLE_NAME} VALUES(4, 'Undead');`, 156 | ]; 157 | 158 | export const UNITS_TABLE_NAME = 'units'; 159 | export const UNITS_TABLE_DROP = `DROP TABLE IF EXISTS ${UNITS_TABLE_NAME} CASCADE`; 160 | export const UNITS_TABLE_CREATES = [ 161 | ` 162 | CREATE TABLE ${SCHEMA_NAME}.${UNITS_TABLE_NAME} 163 | ( 164 | unit_id INT PRIMARY KEY, 165 | unit_name VARCHAR(80) NOT NULL, 166 | race_id INT NOT NULL 167 | ); 168 | `, 169 | ]; 170 | export const UNITS_TABLE_INSERTS = [ 171 | `INSERT INTO ${SCHEMA_NAME}.${UNITS_TABLE_NAME} VALUES(1, 'Tauren Warrior', 1);`, 172 | `INSERT INTO ${SCHEMA_NAME}.${UNITS_TABLE_NAME} VALUES(2, 'Kodo Beast', 1);`, 173 | `INSERT INTO ${SCHEMA_NAME}.${UNITS_TABLE_NAME} VALUES(3, 'Rifleman', 2);`, 174 | `INSERT INTO ${SCHEMA_NAME}.${UNITS_TABLE_NAME} VALUES(4, 'Dryad', 3);`, 175 | `INSERT INTO ${SCHEMA_NAME}.${UNITS_TABLE_NAME} VALUES(5, 'Archer', 3);`, 176 | `INSERT INTO ${SCHEMA_NAME}.${UNITS_TABLE_NAME} VALUES(6, 'Ghoul', 4);`, 177 | `INSERT INTO ${SCHEMA_NAME}.${UNITS_TABLE_NAME} VALUES(7, 'Frost Wyrm', 4);`, 178 | ]; 179 | 180 | export const PERSON_TABLE_NAME = 'person'; 181 | export const PERSON_TABLE_DROP = `DROP TABLE IF EXISTS ${PERSON_TABLE_NAME} CASCADE`; 182 | export const PERSON_TABLE_CREATES = [ 183 | ` 184 | CREATE TABLE ${SCHEMA_NAME}.${PERSON_TABLE_NAME} 185 | ( 186 | person_id INT PRIMARY KEY, 187 | name VARCHAR(80) NOT NULL, 188 | passport_id INT NOT NULL 189 | ); 190 | `, 191 | ]; 192 | export const PERSON_TABLE_INSERTS = [ 193 | `INSERT INTO ${SCHEMA_NAME}.${PERSON_TABLE_NAME} VALUES(1, 'Arthas', 1);`, 194 | ]; 195 | 196 | export const PASSPORT_TABLE_NAME = 'passport'; 197 | export const PASSPORT_TABLE_DROP = `DROP TABLE IF EXISTS ${PASSPORT_TABLE_NAME} CASCADE`; 198 | export 
const PASSPORT_TABLE_CREATES = [ 199 | ` 200 | CREATE TABLE ${SCHEMA_NAME}.${PASSPORT_TABLE_NAME} 201 | ( 202 | passport_id INT PRIMARY KEY, 203 | code VARCHAR(80) NOT NULL 204 | ); 205 | `, 206 | ]; 207 | export const PASSPORT_TABLE_INSERTS = [ 208 | `INSERT INTO ${SCHEMA_NAME}.${PASSPORT_TABLE_NAME} VALUES(1, 'Frostmourne');`, 209 | ]; 210 | -------------------------------------------------------------------------------- /src/tests/integration/sqlite/queries.ts: -------------------------------------------------------------------------------- 1 | export const DATA_TYPES_TABLE_NAME = 'data_types'; 2 | export const DATA_TYPES_TABLE_DROP = `DROP TABLE IF EXISTS ${DATA_TYPES_TABLE_NAME};`; 3 | export const DATA_TYPES_TABLE_CREATES = [ 4 | ` 5 | CREATE TABLE ${DATA_TYPES_TABLE_NAME} 6 | ( 7 | id INTEGER CONSTRAINT data_types_pk PRIMARY KEY AUTOINCREMENT, 8 | f_int INTEGER, 9 | f_integer INTEGER, 10 | f_tinyint TINYINT, 11 | f_smallint SMALLINT, 12 | f_mediumint MEDIUMINT, 13 | f_bigint BIGINT, 14 | f_unsigned_big_int UNSIGNED BIG INT, 15 | f_int2 INT2, 16 | f_int8 INT8, 17 | f_real REAL, 18 | f_double DOUBLE, 19 | f_double_precision DOUBLE PRECISION, 20 | f_float FLOAT, 21 | f_numeric NUMERIC(7,2), 22 | f_decimal DECIMAL(6,2), 23 | f_date DATE, 24 | f_datetime DATETIME, 25 | f_timestamp TIMESTAMP, 26 | f_time TIME, 27 | f_varchar VARCHAR(80), 28 | f_character CHARACTER(1), 29 | f_varying_character VARYING CHARACTER, 30 | f_nchar NCHAR(1), 31 | f_native_character NATIVE CHARACTER, 32 | f_nvarchar NVARCHAR, 33 | f_text TEXT, 34 | f_clob CLOB, 35 | f_boolean BOOLEAN, 36 | f_blob BLOB 37 | ); 38 | ` 39 | ]; 40 | 41 | export const INDICES_TABLE_NAME = 'indices'; 42 | export const INDICES_TABLE_DROP = `DROP TABLE IF EXISTS ${INDICES_TABLE_NAME}`; 43 | export const INDICES_TABLE_CREATES = [ 44 | ` 45 | CREATE TABLE ${INDICES_TABLE_NAME} 46 | ( 47 | id INTEGER CONSTRAINT indices_pk PRIMARY KEY AUTOINCREMENT, 48 | f_unique INTEGER not null, 49 | f_multi_1 INTEGER, 50 | 
f_multi_2 VARCHAR(80), 51 | f_not_unique INTEGER 52 | ); 53 | `, 54 | ` 55 | CREATE UNIQUE INDEX indices_f_multi_1_f_multi_2_uindex 56 | ON ${INDICES_TABLE_NAME} (f_multi_1, f_multi_2); 57 | `, 58 | ` 59 | CREATE UNIQUE INDEX indices_f_multi_1_uindex 60 | ON ${INDICES_TABLE_NAME} (f_multi_1); 61 | `, 62 | ` 63 | CREATE INDEX indices_f_not_unique_index 64 | ON ${INDICES_TABLE_NAME} (f_not_unique); 65 | `, 66 | ` 67 | CREATE UNIQUE INDEX indices_f_unique_uindex 68 | ON ${INDICES_TABLE_NAME} (f_unique); 69 | `, 70 | ]; 71 | 72 | export const AUTHORS_TABLE_NAME = 'authors'; 73 | export const AUTHORS_TABLE_DROP = `DROP TABLE IF EXISTS ${AUTHORS_TABLE_NAME}`; 74 | export const AUTHORS_TABLE_CREATES = [ 75 | ` 76 | CREATE TABLE ${AUTHORS_TABLE_NAME} 77 | ( 78 | author_id INT CONSTRAINT authors_pk PRIMARY KEY, 79 | full_name VARCHAR(80) not null 80 | ); 81 | `, 82 | ]; 83 | export const AUTHORS_TABLE_INSERTS = [ 84 | `INSERT INTO ${AUTHORS_TABLE_NAME} VALUES (1, 'Isasc Asimov');`, 85 | `INSERT INTO ${AUTHORS_TABLE_NAME} VALUES (2, 'James Clavell');`, 86 | ]; 87 | 88 | export const BOOKS_TABLE_NAME = 'books'; 89 | export const BOOKS_TABLE_DROP = `DROP TABLE IF EXISTS ${BOOKS_TABLE_NAME}`; 90 | export const BOOKS_TABLE_CREATES = [ 91 | ` 92 | CREATE TABLE ${BOOKS_TABLE_NAME} 93 | ( 94 | book_id INT CONSTRAINT books_pk PRIMARY KEY, 95 | title VARCHAR(80) not null 96 | ); 97 | `, 98 | ]; 99 | export const BOOKS_TABLE_INSERTS = [ 100 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (1, 'Prelude to Foundation');`, 101 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (2, 'The End of Eternity');`, 102 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (3, 'Shogun');`, 103 | `INSERT INTO ${BOOKS_TABLE_NAME} VALUES (4, 'Galactic Shogun');`, 104 | ]; 105 | 106 | export const AUTHORS_BOOKS_TABLE_NAME = 'authors_books'; 107 | export const AUTHORS_BOOKS_TABLE_DROP = `DROP TABLE IF EXISTS ${AUTHORS_BOOKS_TABLE_NAME}`; 108 | export const AUTHORS_BOOKS_TABLE_CREATES = [ 109 | ` 110 | CREATE TABLE 
${AUTHORS_BOOKS_TABLE_NAME} 111 | ( 112 | author_id INT NOT NULL, 113 | book_id INT NOT NULL 114 | ); 115 | `, 116 | ]; 117 | export const AUTHORS_BOOKS_TABLE_INSERTS = [ 118 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 1);`, 119 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 2);`, 120 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (1, 4);`, 121 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (2, 3);`, 122 | `INSERT INTO ${AUTHORS_BOOKS_TABLE_NAME} VALUES (2, 4);`, 123 | ]; 124 | 125 | export const RACES_TABLE_NAME = 'races'; 126 | export const RACES_TABLE_DROP = `DROP TABLE IF EXISTS ${RACES_TABLE_NAME}`; 127 | export const RACES_TABLE_CREATES = [ 128 | ` 129 | CREATE TABLE ${RACES_TABLE_NAME} 130 | ( 131 | race_id INT CONSTRAINT races_pk PRIMARY KEY, 132 | race_name VARCHAR(80) NOT NULL 133 | ); 134 | `, 135 | ]; 136 | export const RACES_TABLE_INSERTS = [ 137 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(1, 'Orcs');`, 138 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(2, 'Humans');`, 139 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(3, 'Night Elves');`, 140 | `INSERT INTO ${RACES_TABLE_NAME} VALUES(4, 'Undead');`, 141 | ]; 142 | 143 | export const UNITS_TABLE_NAME = 'units'; 144 | export const UNITS_TABLE_DROP = `DROP TABLE IF EXISTS ${UNITS_TABLE_NAME}`; 145 | export const UNITS_TABLE_CREATES = [ 146 | ` 147 | CREATE TABLE ${UNITS_TABLE_NAME} 148 | ( 149 | unit_id INT CONSTRAINT units_pk PRIMARY KEY, 150 | unit_name VARCHAR(80) NOT NULL, 151 | race_id INT NOT NULL 152 | ); 153 | `, 154 | ]; 155 | export const UNITS_TABLE_INSERTS = [ 156 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(1, 'Tauren Warrior', 1);`, 157 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(2, 'Kodo Beast', 1);`, 158 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(3, 'Rifleman', 2);`, 159 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(4, 'Dryad', 3);`, 160 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(5, 'Archer', 3);`, 161 | `INSERT INTO ${UNITS_TABLE_NAME} VALUES(6, 'Ghoul', 4);`, 162 | `INSERT INTO 
${UNITS_TABLE_NAME} VALUES(7, 'Frost Wyrm', 4);`, 163 | ]; 164 | 165 | export const PERSON_TABLE_NAME = 'person'; 166 | export const PERSON_TABLE_DROP = `DROP TABLE IF EXISTS ${PERSON_TABLE_NAME}`; 167 | export const PERSON_TABLE_CREATES = [ 168 | ` 169 | CREATE TABLE ${PERSON_TABLE_NAME} 170 | ( 171 | person_id INT CONSTRAINT person_pk PRIMARY KEY, 172 | name VARCHAR(80) NOT NULL, 173 | passport_id INT NOT NULL 174 | ); 175 | `, 176 | ]; 177 | export const PERSON_TABLE_INSERTS = [ 178 | `INSERT INTO ${PERSON_TABLE_NAME} VALUES(1, 'Arthas', 1);`, 179 | ]; 180 | 181 | export const PASSPORT_TABLE_NAME = 'passport'; 182 | export const PASSPORT_TABLE_DROP = `DROP TABLE IF EXISTS ${PASSPORT_TABLE_NAME}`; 183 | export const PASSPORT_TABLE_CREATES = [ 184 | ` 185 | CREATE TABLE ${PASSPORT_TABLE_NAME} 186 | ( 187 | passport_id INT CONSTRAINT passport_pk PRIMARY KEY, 188 | code VARCHAR(80) NOT NULL 189 | ); 190 | `, 191 | ]; 192 | export const PASSPORT_TABLE_INSERTS = [ 193 | `INSERT INTO ${PASSPORT_TABLE_NAME} VALUES(1, 'Frostmourne');`, 194 | ]; 195 | -------------------------------------------------------------------------------- /src/tests/integration/sqlite/sqlite.test.ts: -------------------------------------------------------------------------------- 1 | import { Sequelize } from 'sequelize-typescript'; 2 | import { ITestMetadata } from '../ITestMetadata'; 3 | import { TestRunner } from '../TestRunner'; 4 | import { 5 | DATA_TYPES_TABLE_NAME, 6 | DATA_TYPES_TABLE_DROP, 7 | DATA_TYPES_TABLE_CREATES, 8 | INDICES_TABLE_NAME, 9 | INDICES_TABLE_DROP, 10 | INDICES_TABLE_CREATES, 11 | AUTHORS_TABLE_NAME, 12 | AUTHORS_TABLE_DROP, 13 | AUTHORS_TABLE_CREATES, 14 | AUTHORS_TABLE_INSERTS, 15 | BOOKS_TABLE_NAME, 16 | BOOKS_TABLE_DROP, 17 | BOOKS_TABLE_CREATES, 18 | BOOKS_TABLE_INSERTS, 19 | AUTHORS_BOOKS_TABLE_NAME, 20 | AUTHORS_BOOKS_TABLE_DROP, 21 | AUTHORS_BOOKS_TABLE_CREATES, 22 | AUTHORS_BOOKS_TABLE_INSERTS, 23 | RACES_TABLE_NAME, 24 | RACES_TABLE_DROP, 25 | 
RACES_TABLE_CREATES, 26 | RACES_TABLE_INSERTS, 27 | UNITS_TABLE_NAME, 28 | UNITS_TABLE_DROP, 29 | UNITS_TABLE_CREATES, 30 | UNITS_TABLE_INSERTS, 31 | PERSON_TABLE_NAME, 32 | PERSON_TABLE_DROP, 33 | PERSON_TABLE_CREATES, 34 | PERSON_TABLE_INSERTS, 35 | PASSPORT_TABLE_NAME, 36 | PASSPORT_TABLE_DROP, 37 | PASSPORT_TABLE_CREATES, 38 | PASSPORT_TABLE_INSERTS, 39 | 40 | } from './queries'; 41 | 42 | export const testMetadata: ITestMetadata = { 43 | name: 'SQLite', 44 | dialect: 'sqlite', 45 | testTables: [ 46 | { 47 | name: DATA_TYPES_TABLE_NAME, 48 | createQueries: DATA_TYPES_TABLE_CREATES, 49 | dropQuery: DATA_TYPES_TABLE_DROP, 50 | }, 51 | { 52 | name: INDICES_TABLE_NAME, 53 | createQueries: INDICES_TABLE_CREATES, 54 | dropQuery: INDICES_TABLE_DROP, 55 | }, 56 | { 57 | name: AUTHORS_TABLE_NAME, 58 | createQueries: AUTHORS_TABLE_CREATES, 59 | dropQuery: AUTHORS_TABLE_DROP, 60 | insertQueries: AUTHORS_TABLE_INSERTS, 61 | }, 62 | { 63 | name: BOOKS_TABLE_NAME, 64 | createQueries: BOOKS_TABLE_CREATES, 65 | dropQuery: BOOKS_TABLE_DROP, 66 | insertQueries: BOOKS_TABLE_INSERTS, 67 | }, 68 | { 69 | name: AUTHORS_BOOKS_TABLE_NAME, 70 | createQueries: AUTHORS_BOOKS_TABLE_CREATES, 71 | dropQuery: AUTHORS_BOOKS_TABLE_DROP, 72 | insertQueries: AUTHORS_BOOKS_TABLE_INSERTS, 73 | }, 74 | { 75 | name: RACES_TABLE_NAME, 76 | createQueries: RACES_TABLE_CREATES, 77 | dropQuery: RACES_TABLE_DROP, 78 | insertQueries: RACES_TABLE_INSERTS, 79 | }, 80 | { 81 | name: UNITS_TABLE_NAME, 82 | createQueries: UNITS_TABLE_CREATES, 83 | dropQuery: UNITS_TABLE_DROP, 84 | insertQueries: UNITS_TABLE_INSERTS, 85 | }, 86 | { 87 | name: PERSON_TABLE_NAME, 88 | createQueries: PERSON_TABLE_CREATES, 89 | dropQuery: PERSON_TABLE_DROP, 90 | insertQueries: PERSON_TABLE_INSERTS, 91 | }, 92 | { 93 | name: PASSPORT_TABLE_NAME, 94 | createQueries: PASSPORT_TABLE_CREATES, 95 | dropQuery: PASSPORT_TABLE_DROP, 96 | insertQueries: PASSPORT_TABLE_INSERTS, 97 | }, 98 | ], 99 | filterTables: [ DATA_TYPES_TABLE_NAME ], 100 
| filterSkipTables: [ INDICES_TABLE_NAME ], 101 | dataTypes: { 102 | dataTypesTable: DATA_TYPES_TABLE_NAME, 103 | async getColumnNativeDataType( 104 | connection: Sequelize, 105 | schema: string, 106 | table: string, 107 | column: string): Promise<string> 108 | { 109 | return column; 110 | }, 111 | // TODO SQLite does not have static data types thus I'm not sure how to test type mappings 112 | testValues: [ 113 | ['varchar', 'Hello world'], 114 | ], 115 | }, 116 | associations: { 117 | leftTableOneToOne: PERSON_TABLE_NAME, 118 | rightTableOneToOne: PASSPORT_TABLE_NAME, 119 | leftTableOneToMany: RACES_TABLE_NAME, 120 | rightTableOneToMany: UNITS_TABLE_NAME, 121 | leftTableManyToMany: AUTHORS_TABLE_NAME, 122 | rightTableManyToMany: BOOKS_TABLE_NAME, 123 | }, 124 | }; 125 | 126 | const testRunner = new TestRunner(testMetadata); 127 | testRunner.run(); 128 | -------------------------------------------------------------------------------- /src/tests/integration/test-env.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | if [[ -z "${TEST_DB_HOST}" ]]; then 5 | export TEST_DB_HOST="localhost"; 6 | fi 7 | 8 | if [[ -z "${TEST_DB_PORT}" ]]; then 9 | export TEST_DB_PORT="1234"; 10 | fi 11 | 12 | if [[ -z "${TEST_DB_DATABASE}" ]]; then 13 | export TEST_DB_DATABASE="testdb"; 14 | fi 15 | 16 | if [[ -z "${TEST_DB_USERNAME}" ]]; then 17 | export TEST_DB_USERNAME="sa"; 18 | fi 19 | 20 | if [[ -z "${TEST_DB_PASSWORD}" ]]; then 21 | export TEST_DB_PASSWORD="Passw0rd88!"; 22 | fi 23 | -------------------------------------------------------------------------------- /src/tests/unit/builders.test.ts: -------------------------------------------------------------------------------- 1 | import { nodeToString, generateNamedImports } from '../../builders/utils'; 2 | 3 | describe('Builder utils', () => { 4 | 5 | describe('named imports', () => { 6 | it('should generate single import statement', () => { 7 | const 
importsSpecifier = 'Token1'; 8 | const moduleSpecifier = `some-module`; 9 | const expected = `import { ${importsSpecifier} } from "${moduleSpecifier}";`; 10 | const generated = nodeToString(generateNamedImports([importsSpecifier], moduleSpecifier)); 11 | 12 | expect(generated).toBe(expected); 13 | }); 14 | 15 | it('should generate multiple named imports statement', () => { 16 | const importsSpecifiers = ['Token1', 'Token2', 'Token3']; 17 | const moduleSpecifier = `some-module`; 18 | const expected = `import { ${importsSpecifiers.join(`, `)} } from "${moduleSpecifier}";`; 19 | const generated = nodeToString(generateNamedImports(importsSpecifiers, moduleSpecifier)); 20 | 21 | expect(generated).toBe(expected); 22 | }); 23 | }); 24 | 25 | }); 26 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2019", 4 | "module": "CommonJS", 5 | "declaration": true, 6 | "outDir": "./build", 7 | "rootDir": "./src", 8 | "downlevelIteration": true, 9 | "strict": true, 10 | "esModuleInterop": true, 11 | "experimentalDecorators": true, 12 | "emitDecoratorMetadata": true, 13 | "forceConsistentCasingInFileNames": false 14 | }, 15 | "exclude": [ 16 | "src/tests", 17 | "**/output-models", 18 | "**/prova.ts", 19 | "**/tmp" 20 | ] 21 | } 22 | --------------------------------------------------------------------------------