├── .dockerignore ├── .env ├── .env.development ├── .env.docker ├── .env.local ├── .eslintrc.js ├── .github ├── pull_request_template.md └── workflows │ ├── cd.yml │ └── ci.yml ├── .gitignore ├── .prettierrc ├── .sequelizerc ├── .vscode └── launch.json ├── Dockerfile ├── LICENSE ├── README.md ├── __mocks__ ├── @services │ └── circuitbreaker.js ├── @utils │ └── token.js └── bull.js ├── assets └── Node-Express-Graphql-Template.postman_collection.json ├── babel.config.js ├── badges ├── badge-branches.svg ├── badge-functions.svg ├── badge-lines.svg └── badge-statements.svg ├── config └── db.js ├── docker-compose.yml ├── jest.config.json ├── jest.setup.js ├── jsconfig.json ├── makefile ├── migrations └── 20191209182815-create-items.js ├── node_express_graphql_template_github.svg ├── nodemon.json ├── package.json ├── repo-visualizer.svg ├── resources └── v1 │ ├── 01_products.sql │ ├── 02_addresses.sql │ ├── 03_stores.sql │ ├── 04_supplier.sql │ ├── 05_supplier_products.sql │ ├── 06_store_products.sql │ ├── 07_purchased_products.sql │ └── 08_users.sql ├── scripts ├── migrate-and-run.sh └── setup-local.sh ├── seeders ├── 01_products.js ├── 02_addresses.js ├── 03_stores.js ├── 04_supplier.js ├── 05_supplier_products.js ├── 06_store_products.js ├── 07_purchased_products.js └── 08_users.js ├── server ├── cronJobs │ ├── aggregateJob.js │ └── tests │ │ └── aggregateJob.test.js ├── daos │ ├── auth.js │ ├── products.js │ ├── purchasedProducts.js │ └── tests │ │ ├── auth.test.js │ │ ├── products.test.js │ │ └── purchasedProducts.test.js ├── database │ ├── dbUtils.js │ ├── index.js │ ├── models │ │ ├── addresses.js │ │ ├── index.js │ │ ├── products.js │ │ ├── purchased_products.js │ │ ├── store_products.js │ │ ├── stores.js │ │ ├── supplier_products.js │ │ ├── suppliers.js │ │ └── users.js │ └── tests │ │ ├── dbUtils.test.js │ │ └── index.test.js ├── gql │ ├── auth │ │ ├── index.js │ │ └── tests │ │ │ └── index.test.js │ ├── fields │ │ ├── args │ │ │ └── index.js │ │ └── timestamps 
│ │ │ └── index.js │ ├── models │ │ ├── addresses │ │ │ └── index.js │ │ ├── aggregate │ │ │ ├── index.js │ │ │ ├── purchasedProductsUtils.js │ │ │ └── tests │ │ │ │ ├── index.test.js │ │ │ │ └── purchasedProductsUtils.test.js │ │ ├── products │ │ │ └── index.js │ │ ├── purchasedProducts │ │ │ ├── customCreateResolver.js │ │ │ ├── index.js │ │ │ └── tests │ │ │ │ └── customCreateResolver.test.js │ │ ├── storeProducts │ │ │ └── index.js │ │ ├── stores │ │ │ └── index.js │ │ ├── supplierProducts │ │ │ └── index.js │ │ ├── suppliers │ │ │ └── index.js │ │ ├── tests │ │ │ ├── addresses │ │ │ │ ├── addresses.test.js │ │ │ │ ├── mutation.test.js │ │ │ │ ├── pagination.test.js │ │ │ │ └── query.test.js │ │ │ ├── products │ │ │ │ ├── mutation.test.js │ │ │ │ ├── pagination.test.js │ │ │ │ ├── products.test.js │ │ │ │ └── query.test.js │ │ │ ├── purchasedProducts │ │ │ │ ├── mutation.test.js │ │ │ │ ├── pagination.test.js │ │ │ │ ├── purchasedProducts.test.js │ │ │ │ └── query.test.js │ │ │ ├── storeProducts │ │ │ │ ├── mutation.test.js │ │ │ │ ├── pagination.test.js │ │ │ │ ├── query.test.js │ │ │ │ └── storeProducts.test.js │ │ │ ├── stores │ │ │ │ ├── mutation.test.js │ │ │ │ ├── pagination.test.js │ │ │ │ ├── query.test.js │ │ │ │ └── stores.test.js │ │ │ ├── supplierProducts │ │ │ │ ├── mutation.test.js │ │ │ │ ├── pagination.test.js │ │ │ │ ├── query.test.js │ │ │ │ └── supplierProducts.test.js │ │ │ └── suppliers │ │ │ │ ├── mutation.test.js │ │ │ │ ├── pagination.test.js │ │ │ │ ├── query.test.js │ │ │ │ └── suppliers.test.js │ │ └── users │ │ │ └── index.js │ ├── mutations.js │ ├── node.js │ ├── queries.js │ ├── subscriptions.js │ ├── subscriptions │ │ └── purchasedProductSubscription │ │ │ ├── index.js │ │ │ ├── purchasedProductSubsUtil.js │ │ │ └── tests │ │ │ ├── index.test.js │ │ │ └── purchasedProductSubsUtil.test.js │ └── tests │ │ └── queries.test.js ├── index.js ├── middleware │ ├── gqlAuth │ │ ├── constants.js │ │ ├── index.js │ │ └── tests │ │ │ └── 
index.test.js │ └── logger │ │ └── index.js ├── services │ ├── circuitbreaker.js │ ├── redis.js │ ├── slack.js │ └── tests │ │ ├── circuitbreaker.test.js │ │ └── slack.test.js ├── tests │ └── index.test.js └── utils │ ├── autogenHelper.js │ ├── configureEnv.js │ ├── constants.js │ ├── gqlFieldUtils.js │ ├── gqlSchemaParsers.js │ ├── index.js │ ├── iterator.js │ ├── migrateUtils.js │ ├── passwordUtils.js │ ├── pubsub.js │ ├── queue.js │ ├── testUtils │ ├── dbConfig.js │ ├── index.js │ ├── mockData.js │ └── testApp.js │ ├── tests │ ├── getAsyncIterator.test.js │ ├── gqlFieldUtils.test.js │ ├── gqlSchemaParsers.test.js │ ├── index.test.js │ ├── migrateUtils.test.js │ ├── passwordUtils.test.js │ ├── pubsub.test.js │ ├── queue.test.js │ ├── token.test.js │ └── transformerUtils.test.js │ ├── token.js │ └── transformerUtils.js ├── sonar-project.properties ├── webpack.dev.config.js ├── webpack.prod.config.js ├── webpack.server.config.js └── yarn.lock /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules -------------------------------------------------------------------------------- /.env: -------------------------------------------------------------------------------- 1 | DB_URI=postgres://reporting_dashboard_role:reportingdashboard123@db_postgres:5432/reporting_dashboard_dev 2 | POSTGRES_HOST=db_postgres 3 | POSTGRES_DB=reporting_dashboard_dev 4 | POSTGRES_USER=reporting_dashboard_role 5 | POSTGRES_PASSWORD=reportingdashboard123 6 | ACCESS_TOKEN_SECRET=4cd7234152590dcfe77e1b6fc52e84f4d30c06fddadd0dd2fb42cbc51fa14b1bb195bbe9d72c9599ba0c6b556f9bd1607a8478be87e5a91b697c74032e0ae7af 7 | REDIS_PORT=6379 8 | # create your own slack webhook url by visiting https://.slack.com/apps/manage/custom-integrations 9 | 10 | SLACK_WEBHOOK_URL=https://hooks.slack.com/services/some/random/text -------------------------------------------------------------------------------- /.env.development: 
-------------------------------------------------------------------------------- 1 | DB_URI=postgres://reporting_dashboard_role:reportingdashboard123@db_postgres:5432/reporting_dashboard_dev 2 | POSTGRES_HOST=db_postgres 3 | POSTGRES_DB=reporting_dashboard_dev 4 | POSTGRES_USER=reporting_dashboard_role 5 | POSTGRES_PASSWORD=reportingdashboard123 6 | ACCESS_TOKEN_SECRET=4cd7234152590dcfe77e1b6fc52e84f4d30c06fddadd0dd2fb42cbc51fa14b1bb195bbe9d72c9599ba0c6b556f9bd1607a8478be87e5a91b697c74032e0ae7af 7 | REDIS_PORT=6379 -------------------------------------------------------------------------------- /.env.docker: -------------------------------------------------------------------------------- 1 | DB_URI=postgres://reporting_dashboard_role:reportingdashboard123@db_postgres:5432/reporting_dashboard_dev 2 | POSTGRES_HOST=db_postgres 3 | POSTGRES_DB=reporting_dashboard_dev 4 | POSTGRES_USER=reporting_dashboard_role 5 | POSTGRES_PASSWORD=reportingdashboard123 6 | POSTGRES_PORT=5432 7 | ACCESS_TOKEN_SECRET=4cd7234152590dcfe77e1b6fc52e84f4d30c06fddadd0dd2fb42cbc51fa14b1bb195bbe9d72c9599ba0c6b556f9bd1607a8478be87e5a91b697c74032e0ae7af 8 | NODE_ENV=production 9 | ENVIRONMENT_NAME=docker 10 | REDIS_HOST=redis 11 | REDIS_PORT=6379 -------------------------------------------------------------------------------- /.env.local: -------------------------------------------------------------------------------- 1 | DB_URI=postgres://reporting_dashboard_role:reportingdashboard123@localhost:5432/reporting_dashboard_dev 2 | POSTGRES_HOST=0.0.0.0 3 | POSTGRES_DB=reporting_dashboard_dev 4 | POSTGRES_USER=reporting_dashboard_role 5 | POSTGRES_PASSWORD=reportingdashboard123 6 | NODE_ENV=local 7 | ACCESS_TOKEN_SECRET=4cd7234152590dcfe77e1b6fc52e84f4d30c06fddadd0dd2fb42cbc51fa14b1bb195bbe9d72c9599ba0c6b556f9bd1607a8478be87e5a91b697c74032e0ae7af 8 | REDIS_HOST=localhost 9 | REDIS_PORT=6379 -------------------------------------------------------------------------------- /.eslintrc.js: 
-------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | 4 | const prettierOptions = JSON.parse(fs.readFileSync(path.resolve(__dirname, '.prettierrc'), 'utf8')); 5 | 6 | module.exports = { 7 | parser: 'babel-eslint', 8 | extends: ['prettier-standard'], 9 | plugins: ['prettier'], 10 | env: { 11 | jest: true, 12 | browser: true, 13 | node: true, 14 | es6: true 15 | }, 16 | parserOptions: { 17 | ecmaVersion: 6, 18 | sourceType: 'module' 19 | }, 20 | rules: { 21 | 'import/no-webpack-loader-syntax': 0, 22 | curly: ['error', 'all'], 23 | 'key-spacing': [2, { beforeColon: false, afterColon: true }], 24 | 'arrow-parens': ['error', 'as-needed'], 25 | 'arrow-body-style': [2, 'as-needed'], 26 | 27 | 'class-methods-use-this': 0, 28 | 'import/imports-first': 0, 29 | 'import/newline-after-import': 0, 30 | 'import/no-dynamic-require': 0, 31 | 'import/no-extraneous-dependencies': 0, 32 | 'import/no-named-as-default': 0, 33 | 'import/no-unresolved': 0, 34 | 'import/prefer-default-export': 0, 35 | 'no-param-reassign': 0, 36 | 'max-len': 0, 37 | 'newline-per-chained-call': 0, 38 | 'no-confusing-arrow': 0, 39 | 'max-lines': ['error', { max: 300, skipBlankLines: true, skipComments: true }], 40 | 'no-unused-vars': 2, 41 | 'no-use-before-define': 0, 42 | 'prefer-template': 2, 43 | 'require-yield': 0, 44 | 'prettier/prettier': ['error', prettierOptions], 45 | 'node/handle-callback-err': ['off'] 46 | }, 47 | settings: { 48 | 'import/resolver': { 49 | node: { 50 | app: './app', 51 | context: 'app', 52 | resolve: { 53 | app: './app', 54 | paths: ['app'], 55 | modules: ['app', 'node_modules'], 56 | extensions: ['.js', '.json', '.coffee'] 57 | } 58 | } 59 | } 60 | } 61 | }; 62 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ### Ticket Link 2 | 3 | --- 
4 | 5 | ### Related Links 6 | 7 | --- 8 | 9 | ### Description 10 | 11 | --- 12 | 13 | ### Steps to Reproduce / Test 14 | 15 | --- 16 | 17 | --- 18 | 19 | ### Checklist 20 | 21 | - [ ] PR description included 22 | - [ ] `yarn test` passes 23 | - [ ] Tests are [changed or added] 24 | - [ ] Relevant documentation is changed or added (and PR referenced) 25 | 26 | ### GIF's 27 | 28 | --- 29 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Node Express GraphQL Template CI 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - develop 7 | push: 8 | branches: 9 | - develop 10 | 11 | jobs: 12 | build_and_test: 13 | name: Build & Test 14 | runs-on: ubuntu-latest 15 | strategy: 16 | matrix: 17 | node-version: [20.x] 18 | steps: 19 | - name: Checkout code 20 | uses: actions/checkout@v3 21 | with: 22 | fetch-depth: 0 23 | - name: Setup environment 24 | uses: actions/setup-node@v2 25 | with: 26 | cache: 'yarn' 27 | node-version: ${{ matrix.node-version }} 28 | - name: Install dependencies 29 | run: yarn install 30 | - name: Lint 31 | run: yarn lint 32 | - name: Test 33 | run: yarn test 34 | - name: Build 35 | run: yarn build:local 36 | - name: SonarQube Scan 37 | uses: sonarsource/sonarqube-scan-action@master 38 | with: 39 | args: > 40 | -Dsonar.scm.revision=${{ github.event.pull_request.head.sha }} 41 | env: 42 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 43 | SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} 44 | SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }} 45 | 46 | - uses: sonarsource/sonarqube-quality-gate-action@master 47 | timeout-minutes: 5 48 | env: 49 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 50 | SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} 51 | SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }} 52 | 53 | - uses: artiomtr/jest-coverage-report-action@v2.3.0 54 | with: 55 | github-token: ${{ secrets.GITHUB_TOKEN }} 56 | package-manager: yarn 
57 | threshold: ${{steps.threshold.outputs.prop}} 58 | skip-step: all 59 | 60 | - uses: codecov/codecov-action@v2 61 | with: 62 | token: ${{ secrets.CODECOV_TOKEN }} 63 | verbose: true 64 | files: ./reports/test-report.xml 65 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | switcher_backup.env 3 | 4 | # Created by .ignore support plugin (hsz.mobi) 5 | ### Node template 6 | # Logs 7 | logs 8 | *.log 9 | npm-debug.log* 10 | yarn-debug.log* 11 | yarn-error.log* 12 | lerna-debug.log* 13 | dist-server/ 14 | # Diagnostic reports (https://nodejs.org/api/report.html) 15 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 16 | 17 | # Runtime data 18 | pids 19 | *.pid 20 | *.seed 21 | *.pid.lock 22 | 23 | # Directory for instrumented libs generated by jscoverage/JSCover 24 | lib-cov 25 | 26 | # Coverage directory used by tools like istanbul 27 | coverage 28 | *.lcov 29 | 30 | # nyc test coverage 31 | .nyc_output 32 | 33 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 34 | .grunt 35 | 36 | # Bower dependency directory (https://bower.io/) 37 | bower_components 38 | 39 | # node-waf configuration 40 | .lock-wscript 41 | 42 | # Compiled binary addons (https://nodejs.org/api/addons.html) 43 | build/Release 44 | 45 | # Dependency directories 46 | node_modules/ 47 | jspm_packages/ 48 | 49 | # TypeScript v1 declaration files 50 | typings/ 51 | 52 | # TypeScript cache 53 | *.tsbuildinfo 54 | 55 | # Optional npm cache directory 56 | .npm 57 | 58 | # Optional eslint cache 59 | .eslintcache 60 | 61 | # Optional REPL history 62 | .node_repl_history 63 | 64 | # Output of 'npm pack' 65 | *.tgz 66 | 67 | # Yarn Integrity file 68 | .yarn-integrity 69 | 70 | # parcel-bundler cache (https://parceljs.org/) 71 | .cache 72 | 73 | # next.js build output 74 | .next 75 | 76 | # nuxt.js build output 77 | .nuxt 78 | 79 | # vuepress 
build output 80 | .vuepress/dist 81 | 82 | # Serverless directories 83 | .serverless/ 84 | 85 | # FuseBox cache 86 | .fusebox/ 87 | 88 | # DynamoDB Local files 89 | .dynamodb/ 90 | 91 | # Intellij 92 | .idea 93 | 94 | # DS_Store 95 | .DS_Store 96 | report.json 97 | reports/test-report.xml -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 120, 3 | "tabWidth": 2, 4 | "useTabs": false, 5 | "semi": true, 6 | "singleQuote": true, 7 | "trailingComma": "none" 8 | } 9 | -------------------------------------------------------------------------------- /.sequelizerc: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | require('./server/utils/configureEnv')(); 3 | module.exports = { 4 | 'config': path.resolve('./config', 'db.js') 5 | } -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | 4 | "configurations": [ 5 | { 6 | "type": "node", 7 | "request": "launch", 8 | "name": "Run locally", 9 | "runtimeExecutable": "yarn", 10 | "runtimeArgs": ["start:local"], 11 | "envFile": "${workspaceFolder}/.env.local", 12 | "env": { "ENVIONMENT_NAME": "local" }, 13 | "outputCapture": "std" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:20 2 | ARG ENVIRONMENT_NAME 3 | ARG BUILD_NAME 4 | 5 | RUN mkdir -p /app-build 6 | ADD . 
/app-build 7 | WORKDIR /app-build 8 | RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn --frozen-lockfile 9 | RUN yarn 10 | RUN yarn build:$BUILD_NAME 11 | 12 | 13 | FROM node:20-alpine 14 | ARG ENVIRONMENT_NAME 15 | ARG BUILD_NAME 16 | 17 | RUN mkdir -p /dist 18 | RUN apk add yarn 19 | RUN yarn global add sequelize-cli@6.2.0 20 | RUN yarn add shelljs dotenv pg sequelize@6.6.5 21 | ADD scripts/migrate-and-run.sh / 22 | ADD package.json / 23 | ADD . / 24 | COPY --from=0 /app-build/dist ./dist 25 | 26 | 27 | CMD ["sh", "./migrate-and-run.sh"] 28 | EXPOSE 9000 -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020-Present Mohammed Ali Chherawalla 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /__mocks__/@services/circuitbreaker.js: -------------------------------------------------------------------------------- 1 | let fire; 2 | 3 | function __setupMocks(f) { 4 | fire = f; 5 | } 6 | 7 | function newCircuitBreaker() { 8 | return { fire: () => fire() }; 9 | } 10 | module.exports = { 11 | __setupMocks, 12 | newCircuitBreaker, 13 | fire 14 | }; 15 | -------------------------------------------------------------------------------- /__mocks__/@utils/token.js: -------------------------------------------------------------------------------- 1 | let token = 'token'; 2 | 3 | function __setupMocks(t) { 4 | token = t; 5 | } 6 | 7 | class Token { 8 | get() { 9 | return token; 10 | } 11 | } 12 | module.exports = { 13 | Token, 14 | __setupMocks 15 | }; 16 | -------------------------------------------------------------------------------- /__mocks__/bull.js: -------------------------------------------------------------------------------- 1 | export default function(msg) { 2 | const done = () => { 3 | console.log('done'); 4 | }; 5 | const job = { 6 | id: 1, 7 | data: { 8 | message: 'This is a sample job' 9 | } 10 | }; 11 | return { 12 | data: msg, 13 | process: fn => fn(job, done), 14 | add: (name, repeat) => name && repeat 15 | }; 16 | } 17 | -------------------------------------------------------------------------------- /babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = function(api) { 2 | api.cache(true); // Caches the computed configuration 3 | 4 | const presets = ['@babel/preset-env', '@babel/preset-flow']; 5 | const plugins = [ 6 | '@babel/plugin-proposal-throw-expressions', 7 | '@babel/plugin-proposal-class-properties', 8 | '@babel/transform-runtime' 9 | ]; 10 | 11 | // Environment-specific configuration 12 | const env = process.env.BABEL_ENV || process.env.NODE_ENV; 13 | if (env !== 'test') { 14 | 
plugins.push([ 15 | 'module-resolver', 16 | { 17 | root: ['./src'], 18 | alias: { 19 | '@server': './server', 20 | '@root': '', 21 | '@utils': './server/utils', 22 | '@middleware': './server/middleware', 23 | '@services': './server/services', 24 | '@daos': './server/daos', 25 | '@database': './server/database', 26 | '@gql': './server/gql', 27 | '@config': './config' 28 | } 29 | } 30 | ]); 31 | } 32 | 33 | return { 34 | presets, 35 | plugins 36 | }; 37 | }; 38 | -------------------------------------------------------------------------------- /badges/badge-branches.svg: -------------------------------------------------------------------------------- 1 | Coverage:branches: 87.7%Coverage:branches87.7% -------------------------------------------------------------------------------- /badges/badge-functions.svg: -------------------------------------------------------------------------------- 1 | Coverage:functions: 94.29%Coverage:functions94.29% -------------------------------------------------------------------------------- /badges/badge-lines.svg: -------------------------------------------------------------------------------- 1 | Coverage:lines: 94.51%Coverage:lines94.51% -------------------------------------------------------------------------------- /badges/badge-statements.svg: -------------------------------------------------------------------------------- 1 | Coverage:statements: 94.4%Coverage:statements94.4% -------------------------------------------------------------------------------- /config/db.js: -------------------------------------------------------------------------------- 1 | const pg = require('pg'); 2 | const Sequelize = require('sequelize'); 3 | const dotenv = require('dotenv'); 4 | 5 | dotenv.config({ path: `.env.${process.env.ENVIRONMENT_NAME}` }); 6 | 7 | module.exports = { 8 | url: 9 | process.env.DB_URI || 10 | `postgres://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@${process.env.POSTGRES_HOST}/${ 11 | process.env.POSTGRES_DB 
12 | }`, 13 | host: process.env.POSTGRES_HOST, 14 | dialectModule: pg, 15 | dialect: 'postgres', 16 | pool: { 17 | min: 0, 18 | max: 10, 19 | idle: 10000 20 | }, 21 | define: { 22 | underscored: true, 23 | timestamps: false 24 | }, 25 | retry: { 26 | match: [ 27 | 'unknown timed out', 28 | Sequelize.TimeoutError, 29 | 'timed', 30 | 'timeout', 31 | 'TimeoutError', 32 | 'Operation timeout', 33 | 'refuse', 34 | 'SQLITE_BUSY' 35 | ], 36 | max: 10 // maximum amount of tries 37 | } 38 | }; 39 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | db_postgres: 4 | image: postgres 5 | ports: 6 | - 5432:5432 7 | restart: always 8 | env_file: 9 | - .env.docker 10 | redis: 11 | image: 'redis:alpine' 12 | ports: 13 | - '6379:6379' 14 | command: ['redis-server', '--bind', 'redis', '--port', '6379'] 15 | app: 16 | build: 17 | context: . 18 | args: 19 | ENVIRONMENT_NAME: .docker 20 | BUILD_NAME: docker 21 | restart: always 22 | depends_on: 23 | - db_postgres 24 | - redis 25 | ports: 26 | - 9000:9000 27 | env_file: 28 | - ./.env.docker 29 | -------------------------------------------------------------------------------- /jest.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "testEnvironment": "node", 3 | "setupFilesAfterEnv": ["./jest.setup.js"], 4 | "reporters": [ 5 | "default", 6 | [ 7 | "jest-sonar", 8 | { 9 | "outputDirectory": "reports", 10 | "outputName": "test-report.xml", 11 | "relativeRootDir": "./", 12 | "reportedFilePath": "relative" 13 | } 14 | ] 15 | ], 16 | "collectCoverageFrom": [ 17 | "**/server/**", 18 | "!**/node_modules/**", 19 | "!**/dist/**", 20 | "!**/server/database/models/**", 21 | "!**/server/utils/testUtils/**", 22 | "!**/server/utils/configureEnv.js", 23 | "!**server/middleware/logger/index.js" 24 | ], 25 | "coverageReporters": 
["json-summary", "text", "lcov"], 26 | "testPathIgnorePatterns": ["/dist/"], 27 | "moduleNameMapper": { 28 | "@server(.*)$": "/server/$1", 29 | "@(database|services|gql|middleware|daos|utils)(.*)$": "/server/$1/$2", 30 | "@config(.*)$": "/config/$1", 31 | "slack-notify": "/node_modules/slack-notify/src/cjs/index.js" 32 | }, 33 | "coverageThreshold": { 34 | "global": { 35 | "statements": 82, 36 | "branches": 82, 37 | "functions": 82, 38 | "lines": 82 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /jest.setup.js: -------------------------------------------------------------------------------- 1 | import { mockDBClient } from '@server/utils/testUtils'; 2 | import { DB_ENV } from '@server/utils/testUtils/dbConfig'; 3 | 4 | jest.doMock('@database', () => ({ 5 | getClient: () => mockDBClient().client, 6 | client: mockDBClient().client, 7 | connect: () => {} 8 | })); 9 | jest.doMock('@database/models', () => ({ 10 | ...mockDBClient().models 11 | })); 12 | 13 | jest.doMock('graphql-redis-subscriptions', () => ({ 14 | RedisPubSub: () => ({ publish: () => ({}), asyncIterator: () => ({}) }) 15 | })); 16 | jest.doMock('ioredis', () => 17 | jest.fn().mockImplementation(() => ({ 18 | publish: () => ({}), 19 | set: msg => 20 | JSON.stringify({ 21 | msg 22 | }), 23 | get: msg => 24 | JSON.stringify({ 25 | msg 26 | }) 27 | })) 28 | ); 29 | 30 | process.env.ENVIRONMENT_NAME = 'test'; 31 | 32 | beforeEach(() => { 33 | process.env = { ...process.env, ...DB_ENV, ENVIRONMENT_NAME: 'test' }; 34 | }); 35 | afterEach(() => { 36 | jest.clearAllMocks(); 37 | jest.resetAllMocks(); 38 | jest.resetModules(); 39 | }); 40 | -------------------------------------------------------------------------------- /jsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "baseUrl": ".", 4 | "paths": { 5 | "@server/*": ["server/*"], 6 | "@root/*": ["*"], 7 | "@utils/*": 
["server/utils/*"], 8 | "@middleware/*": ["server/middleware/*"], 9 | "@services/*": ["server/services/*"], 10 | "@daos/*": ["server/daos/*"], 11 | "@database/*": ["server/database/*"], 12 | "@gql/*": ["server/gql/*"], 13 | "@config/*": ["config/*"] 14 | }, 15 | "moduleResolution": "Node" 16 | }, 17 | "exclude": ["./node_modules", "dist"] 18 | } 19 | -------------------------------------------------------------------------------- /makefile: -------------------------------------------------------------------------------- 1 | docker: 2 | docker-compose --env-file ./.env.docker \ 3 | -f docker-compose.yml \ 4 | -f docker-compose.yml down 5 | 6 | docker-compose --env-file ./.env.docker \ 7 | -f docker-compose.yml \ 8 | -f docker-compose.yml build 9 | 10 | docker-compose --env-file ./.env.docker \ 11 | -f docker-compose.yml \ 12 | -f docker-compose.yml up -------------------------------------------------------------------------------- /migrations/20191209182815-create-items.js: -------------------------------------------------------------------------------- 1 | const { migrate } = require('../server/utils/migrateUtils'); 2 | 3 | module.exports = { 4 | up: queryInterface => migrate(__filename, queryInterface), 5 | down: () => Promise.reject(new Error('error')) 6 | }; 7 | -------------------------------------------------------------------------------- /nodemon.json: -------------------------------------------------------------------------------- 1 | { 2 | "exec": "export ENVIRONMENT_NAME=local && babel-node server/index.js ", 3 | "watch": ["server/*", "./public/*"], 4 | "ignore": ["**/__tests__/**", "*.test.js", "*.spec.js"], 5 | "events": { 6 | "restart": "kill-port 9000", 7 | "crash": "kill-port 9000 && export ENVIRONMENT_NAME=local && babel-node server/index.js " 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /resources/v1/01_products.sql: -------------------------------------------------------------------------------- 
1 | CREATE TABLE products 2 | ( 3 | id serial NOT NULL PRIMARY KEY, 4 | name text NOT NULL, 5 | category text NOT NULL, 6 | amount bigint NOT NULL, 7 | created_at timestamp 8 | WITH time zone DEFAULT NOW 9 | (), 10 | updated_at timestamp 11 | WITH time zone, 12 | deleted_at timestamp 13 | WITH time zone 14 | ); 15 | 16 | CREATE INDEX products_name ON products USING btree 17 | (name); 18 | 19 | CREATE INDEX products_category ON products USING btree 20 | (category); 21 | 22 | -------------------------------------------------------------------------------- /resources/v1/02_addresses.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE addresses ( 2 | id serial NOT NULL PRIMARY KEY, 3 | address_1 text NOT NULL, 4 | address_2 text NOT NULL, 5 | city text NOT NULL, 6 | country text NOT NULL, 7 | latitude float8 NOT NULL, 8 | longitude float8 NOT NULL, 9 | created_at timestamp WITH time zone DEFAULT NOW(), 10 | updated_at timestamp WITH time zone, 11 | deleted_at timestamp WITH time zone 12 | ); 13 | 14 | CREATE INDEX addresses_latitude ON addresses USING btree (latitude); 15 | 16 | CREATE INDEX addresses_longitude ON addresses USING btree (longitude); 17 | 18 | -------------------------------------------------------------------------------- /resources/v1/03_stores.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE stores ( 2 | id serial NOT NULL PRIMARY KEY, 3 | name text NOT NULL, 4 | address_id integer NOT NULL, 5 | created_at timestamp WITH time zone DEFAULT NOW(), 6 | updated_at timestamp WITH time zone, 7 | deleted_at timestamp WITH time zone, 8 | CONSTRAINT stores_address_id FOREIGN KEY(address_id) REFERENCES addresses (id) 9 | ); 10 | 11 | CREATE INDEX store_name ON stores USING btree (name); 12 | 13 | -------------------------------------------------------------------------------- /resources/v1/04_supplier.sql: 
-------------------------------------------------------------------------------- 1 | CREATE TABLE suppliers ( 2 | id serial NOT NULL PRIMARY KEY, 3 | name text NOT NULL, 4 | address_id integer NOT NULL, 5 | created_at timestamp WITH time zone DEFAULT NOW(), 6 | updated_at timestamp WITH time zone, 7 | deleted_at timestamp WITH time zone, 8 | CONSTRAINT suppliers_address_id FOREIGN KEY (address_id) REFERENCES addresses (id) 9 | ); 10 | 11 | CREATE INDEX supplier_name ON suppliers USING btree (name); 12 | 13 | -------------------------------------------------------------------------------- /resources/v1/05_supplier_products.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE supplier_products 2 | ( 3 | id serial NOT NULL PRIMARY KEY, 4 | product_id integer NOT NULL, 5 | supplier_id integer NOT NULL, 6 | created_at timestamp 7 | WITH time zone DEFAULT NOW 8 | (), 9 | updated_at timestamp 10 | WITH time zone, 11 | deleted_at timestamp 12 | WITH time zone, 13 | CONSTRAINT suppliers_product_products_id FOREIGN KEY 14 | (product_id) REFERENCES products 15 | (id), 16 | CONSTRAINT suppliers_product_supplier_id FOREIGN KEY 17 | (supplier_id) REFERENCES suppliers 18 | (id), 19 | CONSTRAINT supplier_products_unique_key UNIQUE (product_id, supplier_id) 20 | ); 21 | 22 | -------------------------------------------------------------------------------- /resources/v1/06_store_products.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE store_products 2 | ( 3 | id serial NOT NULL PRIMARY KEY, 4 | product_id integer NOT NULL, 5 | store_id integer NOT NULL, 6 | created_at timestamp 7 | WITH time zone DEFAULT NOW 8 | (), 9 | updated_at timestamp 10 | WITH time zone, 11 | deleted_at timestamp 12 | WITH time zone, 13 | CONSTRAINT store_products_product_id FOREIGN KEY 14 | (product_id) REFERENCES products 15 | (id), 16 | CONSTRAINT store_products_store_id FOREIGN KEY 17 | (store_id) 
REFERENCES stores 18 | (id), 19 | CONSTRAINT store_products_unique_key UNIQUE (product_id, store_id) 20 | ); 21 | 22 | -------------------------------------------------------------------------------- /resources/v1/07_purchased_products.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE purchased_products 2 | ( 3 | id serial NOT NULL PRIMARY KEY, 4 | product_id integer NOT NULL, 5 | price integer NOT NULL, 6 | discount integer NOT NULL, 7 | store_id integer NOT NULL, 8 | delivery_date timestamp 9 | WITH time zone NOT NULL, 10 | created_at timestamp 11 | WITH time zone DEFAULT NOW(), 12 | updated_at timestamp 13 | WITH time zone, 14 | deleted_at timestamp 15 | WITH time zone, 16 | CONSTRAINT purchased_products_product_id FOREIGN KEY 17 | (product_id) REFERENCES products 18 | (id), 19 | CONSTRAINT purchased_products_store_id FOREIGN KEY (store_id) REFERENCES stores (id) 20 | ); 21 | 22 | CREATE INDEX suppliers_delivery_date ON purchased_products USING btree 23 | (delivery_date); 24 | CREATE INDEX store_id ON purchased_products USING btree 25 | (store_id); 26 | 27 | -------------------------------------------------------------------------------- /resources/v1/08_users.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE users ( 2 | id serial NOT NULL PRIMARY KEY, 3 | first_name TEXT NOT NULL, 4 | last_name TEXT NOT NULL, 5 | email TEXT NOT NULL UNIQUE, 6 | password TEXT NOT NULL, 7 | created_at timestamp WITH time zone DEFAULT NOW(), 8 | updated_at timestamp WITH time zone, 9 | deleted_at timestamp WITH time zone 10 | ); 11 | 12 | CREATE INDEX user_email ON users(email); 13 | 14 | -------------------------------------------------------------------------------- /scripts/migrate-and-run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -a . 
".env$ENVIRONMENT_NAME" set +a 3 | sleep 10 4 | echo $BUILD_NAME 5 | if [ "$BUILD_NAME" == "local" ] 6 | then 7 | npx sequelize-cli db:drop 8 | npx sequelize-cli db:create 9 | fi 10 | 11 | npx sequelize-cli db:migrate 12 | 13 | # seed data for local builds 14 | if [ "$BUILD_NAME" == "local" ] 15 | then 16 | for file in seeders/* 17 | do 18 | : 19 | npx sequelize-cli db:seed --seed $file 20 | done 21 | fi 22 | 23 | yarn start -------------------------------------------------------------------------------- /scripts/setup-local.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | export ENVIRONMENT_NAME=local 4 | export NODE_ENV=local 5 | npx sequelize db:drop 6 | npx sequelize db:create 7 | npx sequelize db:migrate 8 | npx sequelize db:seed:all 9 | yarn start:local -------------------------------------------------------------------------------- /seeders/01_products.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: queryInterface => { 3 | const faker = require('faker'); 4 | const range = require('lodash/range'); 5 | const arr = range(1, 2000).map((value, index) => ({ 6 | name: faker.commerce.productName(), 7 | category: faker.commerce.department(), 8 | amount: parseFloat(faker.commerce.price()) * 100 9 | })); 10 | return queryInterface.bulkInsert('products', arr, {}); 11 | }, 12 | down: queryInterface => queryInterface.bulkDelete('products', null, {}) 13 | }; 14 | -------------------------------------------------------------------------------- /seeders/02_addresses.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: queryInterface => { 3 | const faker = require('faker'); 4 | const range = require('lodash/range'); 5 | const arr = range(1, 2000).map((value, index) => ({ 6 | address_1: faker.address.streetName(), 7 | address_2: faker.address.streetAddress(), 8 | city: faker.address.city(), 
9 | country: faker.address.country(), 10 | latitude: faker.address.latitude(), 11 | longitude: faker.address.longitude() 12 | })); 13 | return queryInterface.bulkInsert('addresses', arr, {}); 14 | }, 15 | down: queryInterface => queryInterface.bulkDelete('addresses', null, {}) 16 | }; 17 | -------------------------------------------------------------------------------- /seeders/03_stores.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: queryInterface => { 3 | const faker = require('faker'); 4 | const range = require('lodash/range'); 5 | const arr = range(1, 2000).map((value, index) => ({ 6 | name: faker.company.companyName(), 7 | address_id: 1 + parseInt(Math.random() * 1999) 8 | })); 9 | return queryInterface.bulkInsert('stores', arr, {}); 10 | }, 11 | down: queryInterface => queryInterface.bulkDelete('stores', null, {}) 12 | }; 13 | -------------------------------------------------------------------------------- /seeders/04_supplier.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: queryInterface => { 3 | const faker = require('faker'); 4 | const range = require('lodash/range'); 5 | const arr = range(1, 2000).map((value, index) => ({ 6 | name: faker.company.companyName(), 7 | address_id: 1 + parseInt(Math.random() * 1999) 8 | })); 9 | return queryInterface.bulkInsert('suppliers', arr, {}); 10 | }, 11 | down: queryInterface => queryInterface.bulkDelete('suppliers', null, {}) 12 | }; 13 | -------------------------------------------------------------------------------- /seeders/05_supplier_products.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: queryInterface => { 3 | const range = require('lodash/range'); 4 | const arr = range(1, 2000).map((value, index) => ({ 5 | supplier_id: 1 + parseInt(Math.random() * 1999), 6 | product_id: index + 1 7 | })); 8 | return 
queryInterface.bulkInsert('supplier_products', arr, {}); 9 | }, 10 | down: queryInterface => queryInterface.bulkDelete('supplier_products', null, {}) 11 | }; 12 | -------------------------------------------------------------------------------- /seeders/06_store_products.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: queryInterface => { 3 | const range = require('lodash/range'); 4 | const arr = range(1, 2000).map((value, index) => ({ 5 | store_id: 1 + parseInt(Math.random() * 1999), 6 | product_id: 1 + index 7 | })); 8 | return queryInterface.bulkInsert('store_products', arr, {}); 9 | }, 10 | down: queryInterface => queryInterface.bulkDelete('store_products', null, {}) 11 | }; 12 | -------------------------------------------------------------------------------- /seeders/07_purchased_products.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: queryInterface => { 3 | const faker = require('faker'); 4 | const moment = require('moment'); 5 | const range = require('lodash/range'); 6 | const MAR_11_2022 = 1646981350749; 7 | const OCT_10_1994 = 782980686236; 8 | const arr = range(1, 10000).map((value, index) => { 9 | const price = parseFloat(faker.commerce.price()) * 100; 10 | return { 11 | price, 12 | product_id: 1 + parseInt(Math.random() * 1999), 13 | store_id: 1 + parseInt(Math.random() * 1999), 14 | discount: parseInt(price / (Math.random() * 100)), 15 | delivery_date: moment(MAR_11_2022 + 86400000 * index).format('YYYY-MM-DDTHH:mm:ss.SSSZ'), 16 | created_at: moment(OCT_10_1994 + 86400000 * index).format('YYYY-MM-DDTHH:mm:ss.SSSZ') 17 | }; 18 | }); 19 | return queryInterface.bulkInsert('purchased_products', arr, {}); 20 | }, 21 | down: queryInterface => queryInterface.bulkDelete('purchased_products', null, {}) 22 | }; 23 | -------------------------------------------------------------------------------- /seeders/08_users.js: 
-------------------------------------------------------------------------------- 1 | module.exports = { 2 | up: queryInterface => { 3 | const faker = require('faker'); 4 | const range = require('lodash/range'); 5 | const arr = range(1, 100).map((value, index) => { 6 | const createdBefore = parseInt(Math.random() * 1000); 7 | const crypto = require('crypto'); 8 | const salt = crypto.randomBytes(16).toString('hex'); 9 | const hashedPassword = `${salt}:${crypto.scryptSync('wednesdaySolutions', salt, 64).toString('hex')}`; 10 | return { 11 | first_name: faker.name.firstName(), 12 | last_name: faker.name.lastName(), 13 | email: `mac+${index}@wednesday.is`, 14 | password: hashedPassword, 15 | created_at: faker.date.recent(createdBefore) 16 | }; 17 | }); 18 | return queryInterface.bulkInsert('users', arr, {}); 19 | }, 20 | down: queryInterface => queryInterface.bulkDelete('users', null, {}) 21 | }; 22 | -------------------------------------------------------------------------------- /server/cronJobs/aggregateJob.js: -------------------------------------------------------------------------------- 1 | import { getAllCategories } from '@server/daos/products'; 2 | import moment from 'moment'; 3 | import { 4 | getCountByDate, 5 | getCountByDateForCategory, 6 | getEarliestCreatedDate, 7 | getTotalByDate, 8 | getTotalByDateForCategory 9 | } from '@server/daos/purchasedProducts'; 10 | import { redis } from '@server/services/redis'; 11 | import { logger } from '@server/utils'; 12 | import { REDIS_IMPLEMENTATION_DATE } from '@server/utils/constants'; 13 | 14 | export const aggregateCheck = async () => { 15 | let startDate; 16 | let lastSyncFor; 17 | const endDate = moment(REDIS_IMPLEMENTATION_DATE); 18 | const redisValueForLastSync = await redis.get('lastSyncFor'); 19 | if (redisValueForLastSync) { 20 | lastSyncFor = moment(redisValueForLastSync); 21 | } 22 | if (!lastSyncFor) { 23 | startDate = moment(await getEarliestCreatedDate()); 24 | } else if 
(moment(lastSyncFor).isSameOrAfter(endDate)) { 25 | logger().info(`Redis is updated with aggregate values until ${endDate}`); 26 | return; 27 | } else { 28 | startDate = lastSyncFor; 29 | } 30 | const categories = await getAllCategories(); 31 | 32 | while (moment(startDate).isBefore(endDate)) { 33 | const totalForDate = await getTotalByDate(startDate); 34 | const countForDate = await getCountByDate(startDate); 35 | const formattedDate = startDate.format('YYYY-MM-DD'); 36 | redis.set( 37 | `${formattedDate}_total`, 38 | JSON.stringify({ 39 | total: totalForDate, 40 | count: countForDate 41 | }) 42 | ); 43 | categories.forEach(async category => { 44 | const categoryTotal = await getTotalByDateForCategory(startDate, category); 45 | const categoryCount = await getCountByDateForCategory(startDate, category); 46 | redis.set( 47 | `${formattedDate}_${category}`, 48 | JSON.stringify({ 49 | total: categoryTotal, 50 | count: categoryCount 51 | }) 52 | ); 53 | await redis.set('lastSyncFor', formattedDate); 54 | }); 55 | startDate = startDate.add(1, 'day'); 56 | } 57 | }; 58 | -------------------------------------------------------------------------------- /server/cronJobs/tests/aggregateJob.test.js: -------------------------------------------------------------------------------- 1 | import { redis } from '@server/services/redis'; 2 | import * as utils from '@server/utils'; 3 | import * as purchasedProductsQueries from '@daos/purchasedProducts'; 4 | import { REDIS_IMPLEMENTATION_DATE } from '@server/utils/constants'; 5 | import { aggregateCheck } from '../aggregateJob'; 6 | import moment from 'moment'; 7 | import db from '@database/models'; 8 | describe('Aggregate job tests', () => { 9 | it('should log that everything is jup to date if lastSyncFor is equal to end date', async () => { 10 | jest.spyOn(redis, 'get').mockReturnValueOnce(REDIS_IMPLEMENTATION_DATE); 11 | const spy = jest.spyOn(utils, 'logger'); 12 | await aggregateCheck(); 13 | expect(spy).toBeCalledTimes(1); 14 | 
}); 15 | it('should calculate and set values in redis if the lastSyncFor date is not present', async () => { 16 | jest.spyOn(redis, 'get').mockReturnValueOnce(); 17 | const spy = jest.spyOn(purchasedProductsQueries, 'getEarliestCreatedDate'); 18 | await aggregateCheck(); 19 | expect(spy).toBeCalledTimes(1); 20 | }); 21 | it('should calculate and set values from the lastSyncFor date to the end date if lastSyncFor date is less than endDate', async () => { 22 | db.purchasedProducts.count = () => [{ count: 1 }]; 23 | require('@daos/purchasedProducts'); 24 | jest 25 | .spyOn(redis, 'get') 26 | .mockReturnValueOnce( 27 | moment('16-03-2022', 'DD-MM-YYYY') 28 | .subtract(2, 'day') 29 | .format('YYYY-MM-DD') 30 | ) 31 | .mockReturnValueOnce(JSON.stringify(['Shoes', 'Health'])); 32 | const spy = jest.spyOn(redis, 'set'); 33 | await aggregateCheck(); 34 | expect(spy).toBeCalledTimes(6); 35 | }); 36 | }); 37 | -------------------------------------------------------------------------------- /server/daos/auth.js: -------------------------------------------------------------------------------- 1 | import db from '@database/models'; 2 | import { checkPassword, createPassword } from '@utils/passwordUtils'; 3 | 4 | export const getUserByEmailPassword = async (email, password) => { 5 | const user = await db.users.findOne({ 6 | where: { email } 7 | }); 8 | 9 | if (!user) { 10 | throw Error('Invalid username/password'); 11 | } 12 | if (await checkPassword(password, user.password)) { 13 | return user; 14 | } else { 15 | throw Error('Invalid username/password'); 16 | } 17 | }; 18 | 19 | export const createUserBySignup = async (firstName, lastName, email, password) => { 20 | const encryptedPassword = createPassword(password); 21 | return db.users.create({ 22 | firstName, 23 | lastName, 24 | email, 25 | password: encryptedPassword 26 | }); 27 | }; 28 | -------------------------------------------------------------------------------- /server/daos/products.js: 
-------------------------------------------------------------------------------- 1 | import db from '@database/models'; 2 | import Sequelize from 'sequelize'; 3 | import { redis } from '@services/redis'; 4 | import { transformDbArrayResponseToRawResponse } from '@server/utils/transformerUtils'; 5 | 6 | export const getCategoryById = async id => { 7 | const product = await db.products.findOne({ where: { id } }); 8 | return product.category; 9 | }; 10 | 11 | export const getAllCategories = async () => { 12 | const categoriesFromRedis = await redis.get('categories'); 13 | // add mutation to add category to redis for create product mutation 14 | if (!categoriesFromRedis) { 15 | const allCategories = await db.products.findAll({ 16 | attributes: [[Sequelize.fn('DISTINCT', Sequelize.col('category')), 'category']] 17 | }); 18 | const response = transformDbArrayResponseToRawResponse(allCategories); 19 | const categories = response.map(item => item.category); 20 | redis.set('categories', JSON.stringify(categories)); 21 | return categories; 22 | } else { 23 | return JSON.parse(categoriesFromRedis); 24 | } 25 | }; 26 | -------------------------------------------------------------------------------- /server/daos/purchasedProducts.js: -------------------------------------------------------------------------------- 1 | import db from '@database/models'; 2 | import { Op } from 'sequelize'; 3 | 4 | export const insertPurchasedProducts = args => db.purchasedProducts.create(args); 5 | 6 | export const getEarliestCreatedDate = async () => { 7 | const earliestPurchasedProduct = await db.purchasedProducts.findOne({ 8 | order: ['id'] 9 | }); 10 | const date = earliestPurchasedProduct.createdAt.toISOString().split('T')[0]; 11 | return date; 12 | }; 13 | 14 | export const getTotalByDate = async date => { 15 | const total = await db.purchasedProducts.sum('price', { 16 | where: { createdAt: { [Op.lt]: date.endOf('day').toISOString(), [Op.gt]: date.startOf('day').toISOString() } } 17 | }); 18 
| return total || 0; 19 | }; 20 | export const getTotalByDateForCategory = async (date, category) => { 21 | const total = await db.purchasedProducts.sum('price', { 22 | where: { createdAt: { [Op.lt]: date.endOf('day').toISOString(), [Op.gt]: date.startOf('day').toISOString() } }, 23 | include: [ 24 | { 25 | model: db.products, 26 | as: 'product', 27 | where: { 28 | category 29 | }, 30 | required: true 31 | } 32 | ], 33 | group: ['product.id'] 34 | }); 35 | return total || 0; 36 | }; 37 | 38 | export const getCountByDate = async date => { 39 | const total = await db.purchasedProducts.count({ 40 | where: { createdAt: { [Op.lt]: date.endOf('day').toISOString(), [Op.gt]: date.startOf('day').toISOString() } } 41 | }); 42 | return total; 43 | }; 44 | 45 | export const getCountByDateForCategory = async (date, category) => { 46 | const total = await db.purchasedProducts.count({ 47 | where: { createdAt: { [Op.lt]: date.endOf('day').toISOString(), [Op.gt]: date.startOf('day').toISOString() } }, 48 | include: [ 49 | { 50 | model: db.products, 51 | as: 'product', 52 | where: { 53 | category 54 | }, 55 | required: true 56 | } 57 | ], 58 | group: ['product.id'] 59 | }); 60 | return total[0] ? 
Number(total[0].count) : 0; 61 | }; 62 | -------------------------------------------------------------------------------- /server/daos/tests/auth.test.js: -------------------------------------------------------------------------------- 1 | import { getUserByEmailPassword, createUserBySignup } from '../auth'; 2 | import db from '@database/models'; 3 | import { checkPassword, createPassword } from '@server/utils/passwordUtils'; 4 | 5 | describe('getUserBySign tests', () => { 6 | const email = 'rohansaroha2@wednesday.is'; 7 | const password = '1234'; 8 | const hashedPassword = createPassword(password); 9 | const user = { email, password: hashedPassword }; 10 | let mock; 11 | 12 | beforeEach(() => { 13 | mock = jest.spyOn(db.users, 'findOne'); 14 | mock.mockReturnValue(user); 15 | }); 16 | 17 | it('should ensure that it return user when password is correct', async () => { 18 | const res = await getUserByEmailPassword(email, password); 19 | expect(res).toEqual(user); 20 | }); 21 | }); 22 | 23 | describe('creatUserBySignup tests', () => { 24 | it('should ensure it calls correct params to db', async () => { 25 | const firstName = 'abc'; 26 | const lastName = 'x'; 27 | const email = 'abc@wednesday.is'; 28 | const password = '1234'; 29 | const mock = jest.spyOn(db.users, 'create'); 30 | await createUserBySignup(firstName, lastName, email, password); 31 | expect(mock).toHaveBeenCalledWith({ firstName, lastName, email, password: expect.any(String) }); 32 | expect(mock.mock.calls[0][0].password.length).toEqual(161); 33 | const hashedPassword = mock.mock.calls[0][0].password; 34 | expect(checkPassword(password, hashedPassword)); 35 | }); 36 | }); 37 | -------------------------------------------------------------------------------- /server/daos/tests/products.test.js: -------------------------------------------------------------------------------- 1 | const { redis } = require('@server/services/redis'); 2 | const { getCategoryById, getAllCategories } = require('../products'); 3 
| 4 | describe('Products dao tests', () => { 5 | it('should return the product category if Id is provided', async () => { 6 | const productId = 133; 7 | const res = await getCategoryById(productId); 8 | expect(res).toEqual('Sports'); 9 | }); 10 | describe('getAllCategories tests', () => { 11 | const categories = ['Tools', 'Electronics', 'Sports', 'Books', 'Clothing', 'Kids', 'Music']; 12 | const mockCategoriesValue = ['Sports']; 13 | it('should return the categories data from redis', async () => { 14 | jest.spyOn(redis, 'get').mockReturnValueOnce(JSON.stringify(categories)); 15 | const res = await getAllCategories(); 16 | expect(res).toEqual(categories); 17 | }); 18 | it('should return the categories data from database if not present in redis', async () => { 19 | jest.spyOn(redis, 'get').mockReturnValueOnce(NaN); 20 | const res = await getAllCategories(); 21 | expect(res).toEqual(mockCategoriesValue); 22 | }); 23 | }); 24 | }); 25 | -------------------------------------------------------------------------------- /server/daos/tests/purchasedProducts.test.js: -------------------------------------------------------------------------------- 1 | import db from '@database/models'; 2 | import moment from 'moment'; 3 | import { 4 | insertPurchasedProducts, 5 | getEarliestCreatedDate, 6 | getTotalByDate, 7 | getTotalByDateForCategory, 8 | getCountByDate, 9 | getCountByDateForCategory 10 | } from '../purchasedProducts'; 11 | 12 | describe('purchasedProducts tests', () => { 13 | const defaultTotalPrice = 500; 14 | const date = moment('2022-03-16'); 15 | const category = 'Sports'; 16 | const price = 1122; 17 | const productId = 133; 18 | const discount = 111; 19 | const deliveryDate = '"2022-07-20T17:30:15+05:30"'; 20 | const purchasedProduct = { 21 | price, 22 | productId, 23 | discount, 24 | deliveryDate 25 | }; 26 | 27 | it('should insert a purchased product and call with correct params', async () => { 28 | const mock = jest.spyOn(db.purchasedProducts, 'create'); 29 | await 
insertPurchasedProducts(purchasedProduct); 30 | expect(mock).toHaveBeenCalledWith(purchasedProduct); 31 | }); 32 | 33 | describe('earliestCreatedDate tests', () => { 34 | it('should return the earliest created purchasedProduct ', async () => { 35 | const res = await getEarliestCreatedDate(); 36 | expect(res).toEqual(new Date().toISOString().split('T')[0]); 37 | }); 38 | }); 39 | describe('getTotalByDate tests', () => { 40 | it('should return the total of price for a particular provided date', async () => { 41 | const res = await getTotalByDate(date); 42 | expect(res).toEqual(defaultTotalPrice); 43 | }); 44 | it('should return zero if there is no value present for the date', async () => { 45 | jest.spyOn(db.purchasedProducts, 'sum').mockReturnValueOnce(NaN); 46 | const mockDate = moment('2022-01-03'); 47 | const res = await getTotalByDate(mockDate); 48 | expect(res).toEqual(0); 49 | }); 50 | }); 51 | describe('getTotalByDateForCategory tests', () => { 52 | it('should return the total for a particular category on a provided date', async () => { 53 | jest.spyOn(db.purchasedProducts, 'sum').mockReturnValueOnce(defaultTotalPrice); 54 | const res = await getTotalByDateForCategory(date, category); 55 | expect(res).toEqual(defaultTotalPrice); 56 | }); 57 | it('should return zero if there is no value present for the date', async () => { 58 | jest.spyOn(db.purchasedProducts, 'sum').mockReturnValueOnce(NaN); 59 | const mockDate = moment('2022-01-03'); 60 | const res = await getTotalByDateForCategory(mockDate); 61 | expect(res).toEqual(0); 62 | }); 63 | }); 64 | describe('getCountByDate tests', () => { 65 | const mockValue = 1; 66 | it('should return count by date', async () => { 67 | db.purchasedProducts.count = jest.fn().mockImplementationOnce(() => mockValue); 68 | const res = await getCountByDate(date); 69 | expect(res).toBe(mockValue); 70 | }); 71 | it('should return zero if there is no count present for the date', async () => { 72 | db.purchasedProducts.count = 
jest.fn().mockImplementationOnce(() => 0); 73 | const mockDate = moment('2022-01-03'); 74 | const res = await getCountByDate(mockDate); 75 | expect(res).toEqual(0); 76 | }); 77 | }); 78 | describe('getCountByDateForCategory', () => { 79 | const mockValue = [{ count: 1 }]; 80 | it('should return the count for category for a particular date', async () => { 81 | db.purchasedProducts.count = jest.fn().mockReturnValueOnce(mockValue); 82 | const res = await getCountByDateForCategory(date, category); 83 | expect(res).toEqual(1); 84 | }); 85 | it('should return zero if there is no count present for the date for the category', async () => { 86 | db.purchasedProducts.count = jest.fn().mockImplementationOnce(() => NaN); 87 | const mockDate = moment('2022-01-03'); 88 | const res = await getCountByDateForCategory(mockDate); 89 | expect(res).toEqual(0); 90 | }); 91 | }); 92 | }); 93 | -------------------------------------------------------------------------------- /server/database/dbUtils.js: -------------------------------------------------------------------------------- 1 | import { GraphQLNonNull, GraphQLInt, GraphQLObjectType } from 'graphql'; 2 | import { Op } from 'sequelize'; 3 | import deepMapKeys from 'deep-map-keys'; 4 | import { logger } from '@server/utils'; 5 | 6 | export const sequelizedWhere = (currentWhere = {}, where = {}) => { 7 | where = deepMapKeys(where, k => { 8 | if (Op[k]) { 9 | return Op[k]; 10 | } 11 | return k; 12 | }); 13 | return { ...currentWhere, ...where }; 14 | }; 15 | export const updateUsingId = async (model, args) => { 16 | let affectedRows; 17 | try { 18 | [affectedRows] = await model.update(args, { 19 | where: { 20 | id: args.id, 21 | deletedAt: null 22 | } 23 | }); 24 | } catch (e) { 25 | logger().info('e', e); 26 | throw new Error(`Failed to update ${model.name}`); 27 | } 28 | if (!affectedRows) { 29 | throw new Error('Data not found'); 30 | } 31 | return model.findOne({ where: { id: args.id } }); 32 | }; 33 | 34 | export const 
deleteUsingId = async (model, args) => { 35 | let affectedRows; 36 | try { 37 | affectedRows = await model.destroy({ where: { id: args.id, deletedAt: null } }); 38 | } catch (e) { 39 | throw new Error(`Failed to delete ${model.name}`); 40 | } 41 | if (!affectedRows) { 42 | throw new Error('Data not found'); 43 | } 44 | return args; 45 | }; 46 | 47 | export const deletedId = new GraphQLObjectType({ 48 | name: 'Id', 49 | fields: () => ({ id: { type: new GraphQLNonNull(GraphQLInt) } }) 50 | }); 51 | -------------------------------------------------------------------------------- /server/database/index.js: -------------------------------------------------------------------------------- 1 | import Sequelize from 'sequelize'; 2 | import { getLogger, isTestEnv, logger } from '@server/utils'; 3 | import dbConfig from '@config/db'; 4 | 5 | let client; 6 | let namespace; 7 | const cls = require('cls-hooked'); 8 | 9 | export const getClient = force => { 10 | if (!namespace) { 11 | namespace = cls.createNamespace(`${process.env.ENVIRONMENT_NAME}-namespace`); 12 | } 13 | if (force || !client) { 14 | try { 15 | if (!isTestEnv()) { 16 | Sequelize.useCLS(namespace); 17 | } 18 | client = new Sequelize(dbConfig.url, { 19 | logging: isTestEnv() ? 
false : getLogger(), 20 | ...dbConfig 21 | }); 22 | } catch (err) { 23 | logger().info({ err }); 24 | throw err; 25 | } 26 | } 27 | return client; 28 | }; 29 | export const connect = async () => { 30 | client = getClient(); 31 | try { 32 | await client.authenticate(); 33 | console.log('Connection has been established successfully.\n', { 34 | db_uri: dbConfig.url 35 | }); 36 | } catch (error) { 37 | console.error('Unable to connect to the database:', error); 38 | throw error; 39 | } 40 | }; 41 | export { client }; 42 | -------------------------------------------------------------------------------- /server/database/models/addresses.js: -------------------------------------------------------------------------------- 1 | export function getAttributes(sequelize, DataTypes) { 2 | return { 3 | id: { 4 | type: DataTypes.INTEGER, 5 | allowNull: false, 6 | primaryKey: true, 7 | autoIncrement: true 8 | }, 9 | address1: { 10 | field: 'address_1', 11 | type: DataTypes.TEXT, 12 | allowNull: false 13 | }, 14 | address2: { 15 | field: 'address_2', 16 | type: DataTypes.TEXT, 17 | allowNull: false 18 | }, 19 | city: { 20 | type: DataTypes.TEXT, 21 | allowNull: false 22 | }, 23 | country: { 24 | type: DataTypes.TEXT, 25 | allowNull: false 26 | }, 27 | latitude: { 28 | type: DataTypes.DOUBLE, 29 | allowNull: false 30 | }, 31 | longitude: { 32 | type: DataTypes.DOUBLE, 33 | allowNull: false 34 | }, 35 | createdAt: { 36 | field: 'created_at', 37 | type: DataTypes.DATE, 38 | allowNull: true, 39 | defaultValue: sequelize.fn('now') 40 | }, 41 | updatedAt: { 42 | field: 'updated_at', 43 | type: DataTypes.DATE, 44 | allowNull: true 45 | }, 46 | deletedAt: { 47 | field: 'deleted_at', 48 | type: DataTypes.DATE, 49 | allowNull: true 50 | } 51 | }; 52 | } 53 | 54 | export function model(sequelize, DataTypes) { 55 | const addresses = sequelize.define('addresses', getAttributes(sequelize, DataTypes), { 56 | tableName: 'addresses', 57 | paranoid: true, 58 | timestamps: true 59 | }); 60 | 
addresses.associate = function(models) { 61 | addresses.hasMany(models.suppliers, { 62 | sourceKey: 'id' 63 | }); 64 | 65 | addresses.hasMany(models.stores, { 66 | sourceKey: 'id' 67 | }); 68 | }; 69 | 70 | return addresses; 71 | } 72 | -------------------------------------------------------------------------------- /server/database/models/index.js: -------------------------------------------------------------------------------- 1 | import Sequelize from 'sequelize'; 2 | import dotenv from 'dotenv'; 3 | import { getClient } from '../index'; 4 | 5 | export const db = {}; 6 | 7 | dotenv.config({ path: `.env.${process.env.ENVIRONMENT_NAME}` }); 8 | 9 | const sequelize = getClient(); 10 | 11 | db.products = require('@database/models/products').model(sequelize, Sequelize.DataTypes); 12 | db.stores = require('@database/models/stores').model(sequelize, Sequelize.DataTypes); 13 | db.addresses = require('@database/models/addresses').model(sequelize, Sequelize.DataTypes); 14 | db.suppliers = require('@database/models/suppliers').model(sequelize, Sequelize.DataTypes); 15 | db.users = require('@database/models/users').model(sequelize, Sequelize.DataTypes); 16 | 17 | db.purchasedProducts = require('@database/models/purchased_products').model(sequelize, Sequelize.DataTypes); 18 | db.storeProducts = require('@database/models/store_products').model(sequelize, Sequelize.DataTypes); 19 | db.supplierProducts = require('@database/models/supplier_products').model(sequelize, Sequelize.DataTypes); 20 | 21 | Object.keys(db).forEach(modelName => { 22 | if (db[modelName].associate) { 23 | db[modelName].associate(db); 24 | } 25 | }); 26 | 27 | db.sequelize = sequelize; 28 | db.Sequelize = sequelize; 29 | 30 | export default db; 31 | -------------------------------------------------------------------------------- /server/database/models/products.js: -------------------------------------------------------------------------------- 1 | export function getAttributes(sequelize, DataTypes) { 2 | 
return { 3 | id: { 4 | type: DataTypes.INTEGER, 5 | allowNull: false, 6 | primaryKey: true, 7 | autoIncrement: true 8 | }, 9 | name: { 10 | type: DataTypes.TEXT, 11 | allowNull: false 12 | }, 13 | category: { 14 | type: DataTypes.TEXT, 15 | allowNull: false 16 | }, 17 | amount: { 18 | type: DataTypes.BIGINT, 19 | allowNull: false 20 | }, 21 | createdAt: { 22 | field: 'created_at', 23 | type: DataTypes.DATE, 24 | allowNull: true, 25 | defaultValue: sequelize.fn('now') 26 | }, 27 | updatedAt: { 28 | field: 'updated_at', 29 | type: DataTypes.DATE, 30 | allowNull: true 31 | }, 32 | deletedAt: { 33 | field: 'deleted_at', 34 | type: DataTypes.DATE, 35 | allowNull: true 36 | } 37 | }; 38 | } 39 | 40 | export function model(sequelize, DataTypes) { 41 | const products = sequelize.define('products', getAttributes(sequelize, DataTypes), { 42 | tableName: 'products', 43 | paranoid: true, 44 | timestamps: true 45 | }); 46 | 47 | products.associate = function(models) { 48 | products.supplierProducts = products.hasOne(models.supplierProducts, { 49 | foreignKey: 'productId', 50 | sourceKey: 'id' 51 | }); 52 | products.purchasedProducts = products.hasOne(models.purchasedProducts, { 53 | foreignKey: 'productId', 54 | sourceKey: 'id' 55 | }); 56 | products.storeProducts = products.hasOne(models.storeProducts, { 57 | foreignKey: 'product_id', 58 | sourceKey: 'id' 59 | }); 60 | 61 | products.suppliers = products.belongsToMany(models.suppliers, { 62 | through: models.supplierProducts, 63 | otherKey: 'supplier_id', 64 | sourceKey: 'id' 65 | }); 66 | 67 | products.stores = products.belongsToMany(models.stores, { 68 | through: models.storeProducts, 69 | otherKey: 'store_id', 70 | sourceKey: 'id' 71 | }); 72 | }; 73 | return products; 74 | } 75 | -------------------------------------------------------------------------------- /server/database/models/purchased_products.js: -------------------------------------------------------------------------------- 1 | export function 
getAttributes(sequelize, DataTypes) { 2 | return { 3 | id: { 4 | type: DataTypes.INTEGER, 5 | allowNull: false, 6 | primaryKey: true, 7 | autoIncrement: true 8 | }, 9 | productId: { 10 | field: 'product_id', 11 | type: DataTypes.INTEGER, 12 | allowNull: false, 13 | references: { 14 | model: 'products', 15 | key: 'id' 16 | } 17 | }, 18 | price: { 19 | type: DataTypes.INTEGER, 20 | allowNull: false 21 | }, 22 | discount: { 23 | type: DataTypes.INTEGER, 24 | allowNull: false 25 | }, 26 | deliveryDate: { 27 | field: 'delivery_date', 28 | type: DataTypes.DATE, 29 | allowNull: false 30 | }, 31 | createdAt: { 32 | field: 'created_at', 33 | type: DataTypes.DATE, 34 | allowNull: true, 35 | defaultValue: sequelize.fn('now') 36 | }, 37 | updatedAt: { 38 | field: 'updated_at', 39 | type: DataTypes.DATE, 40 | allowNull: true 41 | }, 42 | deletedAt: { 43 | field: 'deleted_at', 44 | type: DataTypes.DATE, 45 | allowNull: true 46 | }, 47 | storeId: { 48 | field: 'store_id', 49 | type: DataTypes.INTEGER, 50 | allowNull: false, 51 | references: { 52 | model: 'stores', 53 | key: 'id' 54 | } 55 | } 56 | }; 57 | } 58 | 59 | export function model(sequelize, DataTypes) { 60 | const purchasedProducts = sequelize.define('purchased_products', getAttributes(sequelize, DataTypes), { 61 | tableName: 'purchased_products', 62 | paranoid: true, 63 | timestamps: true 64 | }); 65 | 66 | purchasedProducts.associate = function(models) { 67 | purchasedProducts.hasOne(models.products, { 68 | foreignKey: 'id', 69 | sourceKey: 'productId' 70 | }); 71 | purchasedProducts.hasOne(models.storeProducts, { 72 | foreignKey: 'product_id', 73 | sourceKey: 'productId' 74 | }); 75 | purchasedProducts.hasOne(models.supplierProducts, { 76 | foreignKey: 'productId', 77 | sourceKey: 'productId' 78 | }); 79 | purchasedProducts.hasOne(models.stores, { 80 | foreignKey: 'id', 81 | sourceKey: 'storeId' 82 | }); 83 | }; 84 | return purchasedProducts; 85 | } 86 | 
-------------------------------------------------------------------------------- /server/database/models/store_products.js: -------------------------------------------------------------------------------- 1 | export function getAttributes(sequelize, DataTypes) { 2 | return { 3 | id: { 4 | type: DataTypes.INTEGER, 5 | allowNull: false, 6 | primaryKey: true, 7 | autoIncrement: true 8 | }, 9 | productId: { 10 | field: 'product_id', 11 | type: DataTypes.INTEGER, 12 | allowNull: false, 13 | references: { 14 | model: 'products', 15 | key: 'id' 16 | } 17 | }, 18 | storeId: { 19 | field: 'store_id', 20 | type: DataTypes.INTEGER, 21 | allowNull: false, 22 | references: { 23 | model: 'stores', 24 | key: 'id' 25 | } 26 | }, 27 | createdAt: { 28 | field: 'created_at', 29 | type: DataTypes.DATE, 30 | allowNull: true, 31 | defaultValue: sequelize.fn('now') 32 | }, 33 | updatedAt: { 34 | field: 'updated_at', 35 | type: DataTypes.DATE, 36 | allowNull: true 37 | }, 38 | deletedAt: { 39 | field: 'deleted_at', 40 | type: DataTypes.DATE, 41 | allowNull: true 42 | } 43 | }; 44 | } 45 | 46 | export function model(sequelize, DataTypes) { 47 | const storeProducts = sequelize.define('store_products', getAttributes(sequelize, DataTypes), { 48 | tableName: 'store_products', 49 | paranoid: true, 50 | timestamps: true 51 | }); 52 | 53 | storeProducts.associate = function(models) { 54 | storeProducts.stores = storeProducts.hasOne(models.stores, { 55 | foreignKey: 'id' 56 | }); 57 | storeProducts.products = storeProducts.hasOne(models.products, { 58 | foreignKey: 'id' 59 | }); 60 | }; 61 | return storeProducts; 62 | } 63 | -------------------------------------------------------------------------------- /server/database/models/stores.js: -------------------------------------------------------------------------------- 1 | export function getAttributes(sequelize, DataTypes) { 2 | return { 3 | id: { 4 | type: DataTypes.INTEGER, 5 | allowNull: false, 6 | primaryKey: true, 7 | autoIncrement: true 8 | 
}, 9 | name: { 10 | type: DataTypes.TEXT, 11 | allowNull: false 12 | }, 13 | addressId: { 14 | field: 'address_id', 15 | type: DataTypes.INTEGER, 16 | allowNull: false, 17 | references: { 18 | model: 'addresses', 19 | key: 'id' 20 | } 21 | }, 22 | createdAt: { 23 | field: 'created_at', 24 | type: DataTypes.DATE, 25 | allowNull: true, 26 | defaultValue: sequelize.fn('now') 27 | }, 28 | updatedAt: { 29 | field: 'updated_at', 30 | type: DataTypes.DATE, 31 | allowNull: true 32 | }, 33 | deletedAt: { 34 | field: 'deleted_at', 35 | type: DataTypes.DATE, 36 | allowNull: true 37 | } 38 | }; 39 | } 40 | 41 | export function model(sequelize, DataTypes) { 42 | const stores = sequelize.define('stores', getAttributes(sequelize, DataTypes), { 43 | tableName: 'stores', 44 | paranoid: true, 45 | timestamps: true 46 | }); 47 | 48 | stores.associate = function(models) { 49 | stores.storeProducts = stores.hasOne(models.storeProducts, { 50 | foreignKey: 'store_id', 51 | sourceKey: 'id' 52 | }); 53 | stores.products = stores.belongsToMany(models.products, { 54 | through: models.storeProducts, 55 | otherKey: 'store_id', 56 | sourceKey: 'id' 57 | }); 58 | 59 | stores.belongsTo(models.addresses, { 60 | targetKey: 'id', 61 | sourceKey: 'address_id' 62 | }); 63 | }; 64 | return stores; 65 | } 66 | -------------------------------------------------------------------------------- /server/database/models/supplier_products.js: -------------------------------------------------------------------------------- 1 | export function getAttributes(sequelize, DataTypes) { 2 | return { 3 | id: { 4 | type: DataTypes.INTEGER, 5 | allowNull: false, 6 | primaryKey: true, 7 | autoIncrement: true 8 | }, 9 | productId: { 10 | field: 'product_id', 11 | type: DataTypes.INTEGER, 12 | allowNull: false, 13 | references: { 14 | model: 'products', 15 | key: 'id' 16 | } 17 | }, 18 | supplierId: { 19 | field: 'supplier_id', 20 | type: DataTypes.INTEGER, 21 | allowNull: false, 22 | references: { 23 | model: 'suppliers', 
24 | key: 'id' 25 | } 26 | }, 27 | createdAt: { 28 | field: 'created_at', 29 | type: DataTypes.DATE, 30 | allowNull: true, 31 | defaultValue: sequelize.fn('now') 32 | }, 33 | updatedAt: { 34 | field: 'updated_at', 35 | type: DataTypes.DATE, 36 | allowNull: true 37 | }, 38 | deletedAt: { 39 | field: 'deleted_at', 40 | type: DataTypes.DATE, 41 | allowNull: true 42 | } 43 | }; 44 | } 45 | 46 | export function model(sequelize, DataTypes) { 47 | const supplierProducts = sequelize.define('supplier_products', getAttributes(sequelize, DataTypes), { 48 | tableName: 'supplier_products', 49 | paranoid: true, 50 | timestamps: true 51 | }); 52 | 53 | supplierProducts.associate = function(models) { 54 | supplierProducts.suppliers = supplierProducts.hasOne(models.suppliers, { 55 | foreignKey: 'id' 56 | }); 57 | supplierProducts.products = supplierProducts.hasOne(models.products, { 58 | foreignKey: 'id' 59 | }); 60 | }; 61 | return supplierProducts; 62 | } 63 | -------------------------------------------------------------------------------- /server/database/models/suppliers.js: -------------------------------------------------------------------------------- 1 | export function getAttributes(sequelize, DataTypes) { 2 | return { 3 | id: { 4 | type: DataTypes.INTEGER, 5 | allowNull: false, 6 | primaryKey: true, 7 | autoIncrement: true 8 | }, 9 | name: { 10 | type: DataTypes.TEXT, 11 | allowNull: false 12 | }, 13 | addressId: { 14 | field: 'address_id', 15 | type: DataTypes.INTEGER, 16 | allowNull: false, 17 | references: { 18 | model: 'addresses', 19 | key: 'id' 20 | } 21 | }, 22 | createdAt: { 23 | field: 'created_at', 24 | type: DataTypes.DATE, 25 | allowNull: true, 26 | defaultValue: sequelize.fn('now') 27 | }, 28 | updatedAt: { 29 | field: 'updated_at', 30 | type: DataTypes.DATE, 31 | allowNull: true 32 | }, 33 | deletedAt: { 34 | field: 'deleted_at', 35 | type: DataTypes.DATE, 36 | allowNull: true 37 | } 38 | }; 39 | } 40 | 41 | export function model(sequelize, DataTypes) { 42 | 
const suppliers = sequelize.define('suppliers', getAttributes(sequelize, DataTypes), { 43 | tableName: 'suppliers', 44 | paranoid: true, 45 | timestamps: true 46 | }); 47 | suppliers.associate = function(models) { 48 | suppliers.supplierProducts = suppliers.hasOne(models.supplierProducts, { 49 | foreignKey: 'supplier_id', 50 | sourceKey: 'id' 51 | }); 52 | suppliers.products = suppliers.belongsToMany(models.products, { 53 | through: models.supplierProducts, 54 | otherKey: 'supplier_id', 55 | sourceKey: 'id' 56 | }); 57 | 58 | suppliers.belongsTo(models.addresses, { 59 | targetKey: 'id', 60 | sourceKey: 'address_id' 61 | }); 62 | }; 63 | return suppliers; 64 | } 65 | -------------------------------------------------------------------------------- /server/database/models/users.js: -------------------------------------------------------------------------------- 1 | export function getAttributes(sequelize, DataTypes) { 2 | return { 3 | id: { 4 | type: DataTypes.INTEGER, 5 | allowNull: false, 6 | primaryKey: true, 7 | autoIncrement: true 8 | }, 9 | firstName: { 10 | field: 'first_name', 11 | type: DataTypes.TEXT, 12 | allowNull: false 13 | }, 14 | lastName: { 15 | field: 'last_name', 16 | type: DataTypes.TEXT, 17 | allowNull: false 18 | }, 19 | email: { 20 | type: DataTypes.TEXT, 21 | allowNull: false, 22 | unique: true 23 | }, 24 | password: { 25 | type: DataTypes.TEXT, 26 | allowNull: false 27 | }, 28 | createdAt: { 29 | field: 'created_at', 30 | type: DataTypes.DATE, 31 | allowNull: true, 32 | defaultValue: sequelize.fn('now') 33 | }, 34 | updatedAt: { 35 | field: 'updated_at', 36 | type: DataTypes.DATE, 37 | allowNull: true 38 | }, 39 | deletedAt: { 40 | field: 'deleted_at', 41 | type: DataTypes.DATE, 42 | allowNull: true 43 | } 44 | }; 45 | } 46 | 47 | export function model(sequelize, DataTypes) { 48 | const users = sequelize.define('users', getAttributes(sequelize, DataTypes), { 49 | tableName: 'users', 50 | paranoid: true, 51 | timestamps: true 52 | }); 53 | 
return users; 54 | } 55 | -------------------------------------------------------------------------------- /server/database/tests/index.test.js: -------------------------------------------------------------------------------- 1 | import SequelizeMock from 'sequelize-mock'; 2 | import pg from 'pg'; 3 | import { resetAndMockDB } from '@utils/testUtils'; 4 | import { DB_ENV } from '@server/utils/testUtils/dbConfig'; 5 | 6 | const mocks = {}; 7 | describe('getClient', () => { 8 | afterAll(() => { 9 | resetAndMockDB(); 10 | }); 11 | it('successfully get DB Client', async () => { 12 | jest.unmock('@database'); 13 | mocks.sequelize = SequelizeMock; 14 | jest.doMock('sequelize', () => mocks.sequelize); 15 | jest.spyOn(mocks, 'sequelize'); 16 | const { getClient } = require('../../database'); 17 | const client = await getClient(); 18 | await expect(client).toBeInstanceOf(mocks.sequelize); 19 | 20 | expect(mocks.sequelize.mock.calls.length).toEqual(1); 21 | expect(mocks.sequelize.mock.calls[0][0]).toEqual(DB_ENV.DB_URI); 22 | expect(mocks.sequelize.mock.calls[0][1]).toEqual({ 23 | url: DB_ENV.DB_URI, 24 | host: DB_ENV.POSTGRES_HOST, 25 | dialectModule: pg, 26 | dialect: 'postgres', 27 | logging: false, 28 | pool: { 29 | min: 0, 30 | max: 10, 31 | idle: 10000 32 | }, 33 | define: { 34 | underscored: true, 35 | timestamps: false 36 | }, 37 | retry: { 38 | match: [ 39 | 'unknown timed out', 40 | mocks.sequelize.TimeoutError, 41 | 'timed', 42 | 'timeout', 43 | 'TimeoutError', 44 | 'Operation timeout', 45 | 'refuse', 46 | 'SQLITE_BUSY' 47 | ], 48 | max: 10 49 | } 50 | }); 51 | }); 52 | it('throw error on failure', async () => { 53 | jest.unmock('@database'); 54 | mocks.sequelize = SequelizeMock; 55 | jest.doMock('sequelize', () => new Error()); 56 | jest.spyOn(mocks, 'sequelize'); 57 | 58 | const { getClient } = require('../../database'); 59 | await expect(getClient).toThrow(expect.any(Error)); 60 | }); 61 | }); 62 | 63 | describe('connect', () => { 64 | it('successfully connect 
to the database', async () => { 65 | jest.unmock('@database'); 66 | mocks.sequelize = SequelizeMock; 67 | jest.doMock('sequelize', () => mocks.sequelize); 68 | 69 | const { getClient, connect } = require('../../database'); 70 | const client = await getClient(); 71 | jest.spyOn(client, 'authenticate'); 72 | jest.spyOn(console, 'log'); 73 | await connect(); 74 | expect(client.authenticate.mock.calls.length).toBe(1); 75 | expect(console.log.mock.calls.length).toBe(1); 76 | expect(console.log.mock.calls.length).toBe(1); 77 | expect(console.log.mock.calls[0][0]).toBe('Connection has been established successfully.\n'); 78 | expect(console.log.mock.calls[0][1]).toEqual({ 79 | db_uri: process.env.DB_URI 80 | }); 81 | }); 82 | 83 | it('should throw an error if connection fails', async () => { 84 | jest.unmock('@database'); 85 | mocks.sequelize = SequelizeMock; 86 | jest.doMock('sequelize', () => mocks.sequelize); 87 | 88 | const { getClient, connect } = require('../../database'); 89 | const client = await getClient(); 90 | const error = new Error('failed'); 91 | client.authenticate = async () => { 92 | await expect(connect()).rejects.toEqual(error); 93 | jest.spyOn(client, 'authenticate'); 94 | jest.spyOn(console, 'log'); 95 | throw error; 96 | }; 97 | }); 98 | }); 99 | -------------------------------------------------------------------------------- /server/gql/auth/index.js: -------------------------------------------------------------------------------- 1 | import { Token } from '@utils/token'; 2 | import { getUserByEmailPassword, createUserBySignup } from '@daos/auth'; 3 | import { logger } from '@server/utils'; 4 | const getSignedToken = user => new Token({ user }).get(); 5 | 6 | export const handleSignUp = async (parent, args, context, resolveInfo) => { 7 | try { 8 | const { firstName, lastName, email, password } = args; 9 | const newUser = await createUserBySignup(firstName, lastName, email, password); 10 | const token = getSignedToken(newUser); 11 | const { 
dataValues } = newUser; 12 | delete dataValues.password; 13 | return { ...dataValues, token }; 14 | } catch (err) { 15 | logger().info('error:::', err); 16 | logger().info(err); 17 | throw err; 18 | } 19 | }; 20 | 21 | export const handleSignIn = async (parent, args, context, resolveInfo) => { 22 | try { 23 | const { email, password } = args; 24 | const user = await getUserByEmailPassword(email, password); 25 | if (!user) { 26 | throw new Error('User not found!'); 27 | } 28 | return { token: getSignedToken(user?.dataValues) }; 29 | } catch (err) { 30 | logger().error(err); 31 | throw new Error(err.message); 32 | } 33 | }; 34 | -------------------------------------------------------------------------------- /server/gql/auth/tests/index.test.js: -------------------------------------------------------------------------------- 1 | import { getResponse } from '@server/utils/testUtils'; 2 | 3 | describe('handleSignUp tests', () => { 4 | const token = 'token'; 5 | const user = { 6 | id: 1, 7 | firstName: 'John', 8 | lastName: 'Doe', 9 | email: 'john@doe.com', 10 | password: '12345', 11 | createdAt: '2022-08-22T06:18:41.941Z', 12 | updatedAt: '2022-08-22T06:18:41.932Z' 13 | }; 14 | 15 | describe('signup tests', () => { 16 | it('should ensure that the user is able to signup if the database case succeeds', async () => { 17 | const authDaos = require('@daos/auth'); 18 | jest.spyOn(authDaos, 'createUserBySignup').mockImplementation(() => ({ dataValues: user })); 19 | 20 | const mutation = `mutation SignUp { 21 | signUp(firstName: "${user.firstName}", 22 | lastName: "${user.lastName}", 23 | email: "${user.email}", 24 | password: "${user.password}") { 25 | id 26 | email 27 | firstName 28 | lastName 29 | token 30 | createdAt 31 | updatedAt 32 | } 33 | }`; 34 | 35 | const res = await getResponse(mutation); 36 | const u = { ...user, id: `${user.id}` }; 37 | delete u.password; 38 | expect(res.body.data.signUp).toStrictEqual({ ...u, token }); 39 | }); 40 | it('should ensure that the 
user is unable to signup if the database entry fails', async () => { 41 | const error = 'User already exists'; 42 | const authDaos = require('@daos/auth'); 43 | jest.spyOn(authDaos, 'createUserBySignup').mockRejectedValue(new Error(error)); 44 | 45 | const mutation = `mutation SignUp { 46 | signUp(firstName: "${user.firstName}", 47 | lastName: "${user.lastName}", 48 | email: "${user.email}", 49 | password: "${user.password}") { 50 | id 51 | email 52 | firstName 53 | lastName 54 | token 55 | createdAt 56 | updatedAt 57 | } 58 | }`; 59 | 60 | const res = await getResponse(mutation); 61 | expect(res.body.errors).toStrictEqual([error]); 62 | }); 63 | }); 64 | 65 | describe('signin tests', () => { 66 | it('should ensure that the user with the right credentials is given a token', async () => { 67 | const authDaos = require('@daos/auth'); 68 | jest.spyOn(authDaos, 'getUserByEmailPassword').mockImplementation(() => user); 69 | const mutation = `mutation SignIn { 70 | signIn(email: "${user.email}", password: "${user.password}") { 71 | token 72 | } 73 | }`; 74 | const res = await getResponse(mutation); 75 | expect(res.body.data.signIn.token).toStrictEqual(token); 76 | }); 77 | it('should ensure that the user without the right credentials is not given a token', async () => { 78 | const error = 'User does not exist'; 79 | const authDaos = require('@daos/auth'); 80 | jest.spyOn(authDaos, 'getUserByEmailPassword').mockRejectedValue(new Error(error)); 81 | const mutation = `mutation SignIn { 82 | signIn(email: "${user.email}", password: "${user.password}") { 83 | token 84 | } 85 | }`; 86 | const res = await getResponse(mutation); 87 | expect(res.body.errors).toStrictEqual([error]); 88 | }); 89 | }); 90 | }); 91 | -------------------------------------------------------------------------------- /server/gql/fields/args/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLInt, GraphQLNonNull } from 'graphql'; 2 | 3 | export 
const customListArgs = { 4 | limit: { 5 | type: new GraphQLNonNull(GraphQLInt), 6 | description: 'Use with offset to get paginated results with total' 7 | }, 8 | offset: { 9 | type: new GraphQLNonNull(GraphQLInt), 10 | description: 'Use with offset to get paginated results with total' 11 | }, 12 | before: { type: GraphQLInt, description: 'Use with grapql-relay compliant queries' }, 13 | after: { type: GraphQLInt, description: 'Use with grapql-relay compliant queries' }, 14 | first: { type: GraphQLInt, description: 'Use with grapql-relay compliant queries' }, 15 | last: { type: GraphQLInt, description: 'Use with grapql-relay compliant queries' } 16 | }; 17 | -------------------------------------------------------------------------------- /server/gql/fields/timestamps/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLDateTime } from 'graphql-iso-date'; 2 | export const timestamps = { 3 | createdAt: { type: GraphQLDateTime }, 4 | updatedAt: { type: GraphQLDateTime }, 5 | deletedAt: { type: GraphQLDateTime } 6 | }; 7 | -------------------------------------------------------------------------------- /server/gql/models/addresses/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLFloat, GraphQLID, GraphQLInt, GraphQLNonNull, GraphQLObjectType, GraphQLString } from 'graphql'; 2 | import { getNode } from '@gql/node'; 3 | import { createConnection } from 'graphql-sequelize'; 4 | import { supplierLists } from '../suppliers'; 5 | import { timestamps } from '@gql/fields/timestamps'; 6 | import db from '@database/models'; 7 | import { storeLists } from '@gql/models/stores'; 8 | import { totalConnectionFields, listResolver, baseListResolver } from '@utils/index'; 9 | import { getQueryFields, TYPE_ATTRIBUTES } from '@server/utils/gqlFieldUtils'; 10 | 11 | const { nodeInterface } = getNode(); 12 | export const addressFields = { 13 | id: { type: new 
GraphQLNonNull(GraphQLID) }, 14 | address1: { 15 | type: GraphQLString, 16 | extensions: ['@uppercase', 'uppercase'], 17 | resolve: (source, args, context, info) => source.address1 18 | }, 19 | address2: { type: GraphQLString }, 20 | city: { type: GraphQLString }, 21 | country: { type: GraphQLString }, 22 | latitude: { 23 | type: new GraphQLNonNull(GraphQLFloat) 24 | }, 25 | longitude: { 26 | type: new GraphQLNonNull(GraphQLFloat) 27 | } 28 | }; 29 | const GraphQLAddress = new GraphQLObjectType({ 30 | name: 'Address', 31 | interfaces: [nodeInterface], 32 | sqlPaginate: true, 33 | orderBy: { 34 | created_at: 'desc', 35 | id: 'asc' 36 | }, 37 | fields: () => ({ 38 | ...getQueryFields(addressFields, TYPE_ATTRIBUTES.isNonNull), 39 | ...timestamps, 40 | suppliers: { 41 | ...supplierLists.list, 42 | resolve: (source, args, context, info) => 43 | listResolver(supplierLists, source, args, { ...context, address: source.dataValues }, info) 44 | }, 45 | stores: { 46 | ...storeLists.list, 47 | resolve: (source, args, context, info) => 48 | listResolver(storeLists, source, args, { ...context, address: source.dataValues }, info) 49 | } 50 | }) 51 | }); 52 | 53 | const AddressConnection = createConnection({ 54 | name: 'addresses', 55 | target: db.addresses, 56 | nodeType: GraphQLAddress, 57 | before: (findOptions, args, context) => { 58 | findOptions.include = findOptions.include || []; 59 | if (context?.supplier?.id) { 60 | findOptions.include.push({ 61 | model: db.suppliers, 62 | where: { 63 | id: context.supplier.id 64 | } 65 | }); 66 | } 67 | 68 | if (context?.store?.id) { 69 | findOptions.include.push({ 70 | model: db.stores, 71 | where: { 72 | id: context.store.id 73 | } 74 | }); 75 | } 76 | return findOptions; 77 | }, 78 | ...totalConnectionFields 79 | }); 80 | 81 | export { AddressConnection, GraphQLAddress }; 82 | 83 | // queries on the address table 84 | export const addressQueries = { 85 | args: { 86 | id: { 87 | type: new GraphQLNonNull(GraphQLInt) 88 | } 89 | }, 90 | 
query: { 91 | type: GraphQLAddress 92 | }, 93 | model: db.addresses 94 | }; 95 | 96 | // lists on the address table. 97 | export const addressLists = { 98 | list: { 99 | ...AddressConnection, 100 | resolve: (...args) => baseListResolver(AddressConnection, ...args), 101 | type: AddressConnection.connectionType, 102 | args: AddressConnection.connectionArgs 103 | }, 104 | model: db.addresses 105 | }; 106 | 107 | export const addressMutations = { 108 | args: addressFields, 109 | type: GraphQLAddress, 110 | model: db.addresses 111 | }; 112 | -------------------------------------------------------------------------------- /server/gql/models/aggregate/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLFloat, GraphQLObjectType, GraphQLString } from 'graphql'; 2 | import { GraphQLDateTime } from 'graphql-iso-date'; 3 | import { client } from '@database'; 4 | import { handleAggregateQueries, queryOptions, queryRedis } from './purchasedProductsUtils'; 5 | 6 | const Aggregate = new GraphQLObjectType({ 7 | name: 'Aggregate', 8 | fields: () => ({ 9 | total: { 10 | type: new GraphQLObjectType({ 11 | name: 'AggregateSum', 12 | fields: () => ({ 13 | purchasedProductsPrice: { 14 | name: 'TotalPriceOfPurchasedProducts', 15 | type: GraphQLFloat, 16 | resolve: async args => queryRedis('total', args) 17 | } 18 | }) 19 | }), 20 | resolve: args => args 21 | }, 22 | max: { 23 | type: new GraphQLObjectType({ 24 | name: 'AggregateMaximum', 25 | fields: () => ({ 26 | purchasedProductsPrice: { 27 | name: 'MaxPriceOfPurchasedProducts', 28 | type: GraphQLFloat, 29 | resolve: async args => { 30 | const query = `SELECT MAX(price) from purchased_products`; 31 | const { where, join } = handleAggregateQueries(args, 'purchased_products'); 32 | return (await client.query(`${query} ${join} ${where};`, queryOptions(args)))[0].max || 0; 33 | } 34 | } 35 | }) 36 | }), 37 | resolve: args => args 38 | }, 39 | count: { 40 | type: new 
GraphQLObjectType({ 41 | name: 'AggregateCount', 42 | fields: () => ({ 43 | purchasedProducts: { 44 | name: 'CountOfPurchasedProducts', 45 | type: GraphQLFloat, 46 | resolve: async args => queryRedis('count', args) 47 | } 48 | }) 49 | }), 50 | resolve: args => args 51 | } 52 | }) 53 | }); 54 | 55 | const AggregateType = { 56 | type: Aggregate, 57 | args: { 58 | startDate: { type: GraphQLDateTime }, 59 | endDate: { type: GraphQLDateTime }, 60 | category: { type: GraphQLString } 61 | }, 62 | resolve: (_, args) => args 63 | }; 64 | export { AggregateType as Aggregate }; 65 | -------------------------------------------------------------------------------- /server/gql/models/aggregate/purchasedProductsUtils.js: -------------------------------------------------------------------------------- 1 | import moment from 'moment'; 2 | import { QueryTypes } from 'sequelize'; 3 | import { addWhereClause } from '@utils'; 4 | import { TIMESTAMP } from '@utils/constants'; 5 | import { getEarliestCreatedDate } from '@server/daos/purchasedProducts'; 6 | import { redis } from '@server/services/redis'; 7 | import { sendMessage } from '@server/services/slack'; 8 | import { logger } from '@server/utils'; 9 | 10 | export const handleAggregateQueries = (args, tableName) => { 11 | let where = ``; 12 | let join = ``; 13 | const addQuery = suffix => (tableName ? 
`${tableName}.` : '') + suffix; 14 | if (args?.startDate) { 15 | where = addWhereClause(where, `${addQuery(`created_at`)} > :startDate`); 16 | } 17 | if (args?.endDate) { 18 | where = addWhereClause(where, `${addQuery(`created_at`)} < :endDate`); 19 | } 20 | if (args?.category) { 21 | join = `LEFT JOIN products on products.id=purchased_products.product_id`; 22 | where = addWhereClause(where, `products.category = :category`); 23 | } 24 | return { where, join }; 25 | }; 26 | export const queryOptions = args => ({ 27 | replacements: { 28 | type: QueryTypes.SELECT, 29 | startDate: moment(args?.startDate).format(TIMESTAMP), 30 | endDate: moment(args?.endDate).format(TIMESTAMP), 31 | category: args?.category 32 | }, 33 | type: QueryTypes.SELECT 34 | }); 35 | 36 | export const queryRedis = async (type, args) => { 37 | let startDate; 38 | let endDate; 39 | let count = 0; 40 | if (!args?.startDate) { 41 | const createdAtDates = await getEarliestCreatedDate(); 42 | startDate = createdAtDates; 43 | } else { 44 | startDate = args.startDate.toISOString().split('T')[0]; 45 | } 46 | if (!args?.endDate) { 47 | endDate = moment().format('YYYY-MM-DD'); 48 | } else { 49 | endDate = args.endDate.toISOString().split('T')[0]; 50 | } 51 | const key = args?.category ? 
`${startDate}_${args.category}` : `${startDate}_total`; 52 | while (startDate <= endDate) { 53 | let aggregateData; 54 | const totalForDate = await redis.get(key); 55 | if (totalForDate) { 56 | try { 57 | aggregateData = JSON.parse(totalForDate); 58 | count += Number(aggregateData[type]); 59 | } catch (err) { 60 | sendMessage(`Error while parsing data for ${key} as got value ${totalForDate}`); 61 | logger().info(`Error while parsing data for ${key} as got value ${totalForDate}`); 62 | } 63 | } 64 | startDate = moment(startDate) 65 | .add(1, 'day') 66 | .format('YYYY-MM-DD'); 67 | } 68 | return count; 69 | }; 70 | -------------------------------------------------------------------------------- /server/gql/models/aggregate/tests/index.test.js: -------------------------------------------------------------------------------- 1 | import { getResponse, mockDBClient, resetAndMockDB } from '@server/utils/testUtils'; 2 | 3 | describe('Aggregate query tests', () => { 4 | let type; 5 | let category; 6 | const input = `startDate: "0001-12-03T10:15:30Z", endDate: "3020-12-03T10:15:30Z", category:"general"`; 7 | let mocks; 8 | const result = { 9 | sum: 3499714200, 10 | count: 1000, 11 | max: 9999 12 | }; 13 | let dbClient; 14 | beforeEach(() => { 15 | dbClient = mockDBClient(); 16 | resetAndMockDB(null, {}, dbClient); 17 | mocks = { 18 | handleAggregateQueries: jest.fn(() => ({ where: '', join: '' })), 19 | queryOptions: jest.fn(() => ({})), 20 | queryRedis: jest.fn(() => ({})) 21 | }; 22 | jest.doMock('../purchasedProductsUtils', () => ({ 23 | ...mocks 24 | })); 25 | jest.spyOn(dbClient.client, 'query'); 26 | }); 27 | 28 | it('should call query redis with the given type as total and args for sum query', async () => { 29 | type = 'total'; 30 | category = 'Sports'; 31 | const query = `query Query { 32 | aggregate(category: "${category}") { 33 | ${type} { 34 | purchasedProductsPrice 35 | } 36 | } 37 | }`; 38 | 39 | await getResponse(query); 40 | 
expect(mocks.queryRedis.mock.calls.length).toBe(1); 41 | expect(mocks.queryRedis.mock.calls[0]).toEqual([`${type}`, { category: `${category}` }]); 42 | }); 43 | it('should call query redis with the given type as count and args for count query', async () => { 44 | type = 'count'; 45 | const query = `query Query { 46 | aggregate { 47 | ${type} { 48 | purchasedProducts 49 | } 50 | } 51 | }`; 52 | 53 | await getResponse(query); 54 | expect(mocks.queryRedis.mock.calls.length).toBe(1); 55 | expect(mocks.queryRedis.mock.calls[0]).toEqual([`${type}`, {}]); 56 | }); 57 | it('should be able to get the max purchasedProducts', async () => { 58 | dbClient.client.$queueResult([{ max: result.max }]); 59 | const res = await getResponse(`query Aggregate { 60 | aggregate (${input}){ 61 | max { 62 | purchasedProductsPrice 63 | } 64 | } 65 | }`); 66 | expect(res.body.data.aggregate.max.purchasedProductsPrice).toEqual(result.max); 67 | 68 | expect(dbClient.client.query.mock.calls.length).toEqual(1); 69 | expect(dbClient.client.query.mock.calls[0][0]).toContain('SELECT MAX(price) from purchased_products'); 70 | 71 | expect(mocks.handleAggregateQueries.mock.calls.length).toEqual(1); 72 | expect(mocks.queryOptions.mock.calls.length).toEqual(1); 73 | }); 74 | it('should return 0 if it fails to get a value from the query', async () => { 75 | dbClient.client.$queueResult([{ max: 0 }]); 76 | const res = await getResponse(`query Aggregate { 77 | aggregate (${input}){ 78 | max { 79 | purchasedProductsPrice 80 | } 81 | } 82 | }`); 83 | expect(res.body.data.aggregate.max.purchasedProductsPrice).toEqual(0); 84 | expect(dbClient.client.query.mock.calls.length).toEqual(1); 85 | }); 86 | }); 87 | -------------------------------------------------------------------------------- /server/gql/models/aggregate/tests/purchasedProductsUtils.test.js: -------------------------------------------------------------------------------- 1 | import { QueryTypes } from 'sequelize'; 2 | import moment from 'moment'; 3 | 
import { redis } from '@services/redis'; 4 | import { handleAggregateQueries, queryOptions, queryRedis } from '@gql/models/aggregate/purchasedProductsUtils'; 5 | import { TIMESTAMP } from '@utils/constants'; 6 | import * as utils from '@utils'; 7 | 8 | describe('handleAggregateQueries', () => { 9 | it('should the appropriate rawSQL queries based on the args', async () => { 10 | const tableName = 'table_name'; 11 | const startDateAggregationQuery = handleAggregateQueries({ startDate: 1 }, 'table_name'); 12 | expect(startDateAggregationQuery.where).toContain(`WHERE ( ${tableName}.created_at > :startDate )`); 13 | 14 | const endDateAggregationQuery = handleAggregateQueries({ endDate: 1 }); 15 | expect(endDateAggregationQuery.where).toContain('WHERE ( created_at < :endDate )'); 16 | 17 | const categoryAggregationQuery = handleAggregateQueries({ category: 'general' }); 18 | expect(categoryAggregationQuery.where).toContain('WHERE ( products.category = :category )'); 19 | 20 | const multiWhereAggregation = handleAggregateQueries({ category: 'general', endDate: 1, startDate: 1 }); 21 | expect(multiWhereAggregation.where).toContain('( products.category = :category )'); 22 | expect(multiWhereAggregation.where).toContain('( created_at < :endDate )'); 23 | expect(multiWhereAggregation.where).toContain('( created_at > :startDate )'); 24 | }); 25 | }); 26 | describe('queryOptions', () => { 27 | it('should return object with the correct key value pairs', async () => { 28 | const category = 'general'; 29 | const res = await queryOptions({ startDate: 0, endDate: 0, category }); 30 | expect(res.replacements).toBeTruthy(); 31 | expect(res.type).toEqual(QueryTypes.SELECT); 32 | expect(res.replacements.startDate).toBe(moment(0).format(TIMESTAMP)); 33 | expect(res.replacements.endDate).toBe(moment(0).format(TIMESTAMP)); 34 | expect(res.replacements.category).toBe(category); 35 | expect(res.replacements.type).toEqual(QueryTypes.SELECT); 36 | }); 37 | 38 | describe('query redis tests', () 
=> { 39 | const args = { 40 | category: 'Sports' 41 | }; 42 | const type = 'total'; 43 | it('should calculate the total from earliest created date to todays day-1 if no start and end date is provided', async () => { 44 | const spy = jest.spyOn(redis, 'get'); 45 | await queryRedis(type); 46 | expect(spy).toBeCalledWith(`${moment().format('YYYY-MM-DD')}_total`); 47 | }); 48 | it('should add values from the given start and end dates in args ', async () => { 49 | const args = { 50 | startDate: new Date(2022, 2, 1), 51 | endDate: new Date(2022, 2, 4) 52 | }; 53 | const spy = jest.spyOn(redis, 'get'); 54 | await queryRedis(type, args); 55 | expect(spy).toBeCalledTimes(4); 56 | }); 57 | it('should call the date with category if the category is provided in args', async () => { 58 | await queryRedis(type, args); 59 | const spy = jest.spyOn(redis, 'get'); 60 | await queryRedis(type, args); 61 | expect(spy).toBeCalledWith(`${moment().format('YYYY-MM-DD')}_${args.category}`); 62 | }); 63 | it('should add the value after getting from Redis and return the total', async () => { 64 | jest.spyOn(redis, 'get').mockReturnValue( 65 | JSON.stringify({ 66 | total: 15, 67 | count: 2 68 | }) 69 | ); 70 | const res = await queryRedis(type, args); 71 | expect(res).toBe(15); 72 | }); 73 | it('should throw error and also send slack message if there is problem in parsing JSON', async () => { 74 | jest.spyOn(redis, 'get').mockReturnValue('test'); 75 | const spy = jest.spyOn(utils, 'logger').mockImplementation(() => { 76 | const obj = { 77 | info: msg => msg, 78 | error: err => err 79 | }; 80 | return obj; 81 | }); 82 | await queryRedis(type, args); 83 | expect(spy).toBeCalledTimes(2); 84 | }); 85 | }); 86 | }); 87 | -------------------------------------------------------------------------------- /server/gql/models/products/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLID, GraphQLInt, GraphQLNonNull, GraphQLObjectType, 
GraphQLString } from 'graphql'; 2 | import { createConnection } from 'graphql-sequelize'; 3 | import { getNode } from '@gql/node'; 4 | import { supplierLists } from '../suppliers'; 5 | import { storeLists } from '../stores'; 6 | import { timestamps } from '@gql/fields/timestamps'; 7 | import db from '@database/models'; 8 | import { sequelizedWhere } from '@database/dbUtils'; 9 | import { totalConnectionFields, listResolver, baseListResolver } from '@utils/index'; 10 | import { getQueryFields, TYPE_ATTRIBUTES } from '@server/utils/gqlFieldUtils'; 11 | 12 | const { nodeInterface } = getNode(); 13 | export const productFields = { 14 | id: { type: new GraphQLNonNull(GraphQLID) }, 15 | name: { type: GraphQLString }, 16 | category: { type: GraphQLString }, 17 | amount: { type: GraphQLInt } 18 | }; 19 | 20 | // Product 21 | export const GraphQLProduct = new GraphQLObjectType({ 22 | name: 'Product', 23 | interfaces: [nodeInterface], 24 | fields: () => ({ 25 | ...getQueryFields(productFields, TYPE_ATTRIBUTES.isNonNull), 26 | ...timestamps, 27 | suppliers: { 28 | ...supplierLists.list, 29 | resolve: (source, args, context, info) => 30 | listResolver(supplierLists, source, args, { ...context, product: source.dataValues }, info) 31 | }, 32 | stores: { 33 | ...storeLists.list, 34 | resolve: (source, args, context, info) => 35 | listResolver(storeLists, source, args, { ...context, product: source.dataValues }, info) 36 | } 37 | }) 38 | }); 39 | 40 | // relay compliant list 41 | export const ProductConnection = createConnection({ 42 | nodeType: GraphQLProduct, 43 | name: 'products', 44 | target: db.products, 45 | before: (findOptions, args, context) => { 46 | findOptions.include = findOptions.include || []; 47 | if (context?.purchasedProduct?.id) { 48 | findOptions.include.push({ 49 | model: db.purchasedProducts, 50 | where: { 51 | id: context.purchasedProduct.id 52 | } 53 | }); 54 | } 55 | 56 | if (context?.supplier?.id) { 57 | findOptions.include.push({ 58 | model: 
db.suppliers, 59 | where: { 60 | id: context.supplier?.id 61 | } 62 | }); 63 | } 64 | 65 | if (context?.store?.id) { 66 | findOptions.include.push({ 67 | model: db.stores, 68 | where: { 69 | id: context.store?.id 70 | } 71 | }); 72 | } 73 | 74 | if (context?.supplierProduct?.id) { 75 | findOptions.include.push({ 76 | model: db.supplierProducts, 77 | where: { 78 | id: context.supplierProduct.id 79 | } 80 | }); 81 | } 82 | 83 | if (context?.storeProduct?.productId) { 84 | findOptions.include.push({ 85 | model: db.storeProducts, 86 | where: { 87 | productId: context.storeProduct.productId 88 | } 89 | }); 90 | } 91 | findOptions.where = sequelizedWhere(findOptions.where, args.where); 92 | return findOptions; 93 | }, 94 | ...totalConnectionFields 95 | }); 96 | 97 | // queries on the product table 98 | export const productQueries = { 99 | args: { 100 | id: { 101 | type: new GraphQLNonNull(GraphQLInt) 102 | } 103 | }, 104 | query: { 105 | type: GraphQLProduct 106 | }, 107 | model: db.products 108 | }; 109 | 110 | // lists on the product table. 
111 | export const productLists = { 112 | list: { 113 | ...ProductConnection, 114 | resolve: (...args) => baseListResolver(ProductConnection, ...args), 115 | type: ProductConnection.connectionType, 116 | args: ProductConnection.connectionArgs 117 | }, 118 | model: db.products 119 | }; 120 | 121 | export const productMutations = { 122 | args: productFields, 123 | type: GraphQLProduct, 124 | model: db.products 125 | }; 126 | -------------------------------------------------------------------------------- /server/gql/models/purchasedProducts/customCreateResolver.js: -------------------------------------------------------------------------------- 1 | import moment from 'moment'; 2 | import { insertPurchasedProducts } from '@daos/purchasedProducts'; 3 | import { transformSQLError } from '@utils'; 4 | import { redis } from '@services/redis'; 5 | import { SUBSCRIPTION_TOPICS } from '@server/utils/constants'; 6 | import { pubsub } from '@server/utils/pubsub'; 7 | import { getCategoryById } from '@server/daos/products'; 8 | 9 | export const updateRedis = async res => { 10 | const currentDate = moment().format('YYYY-MM-DD'); 11 | const category = await getCategoryById(res.productId); 12 | const redisAggregate = JSON.parse(await redis.get(`${currentDate}_total`)); 13 | const redisAggregateCategory = JSON.parse(await redis.get(`${currentDate}_${category}`)); 14 | redis.set( 15 | `${currentDate}_${category}`, 16 | JSON.stringify({ 17 | total: redisAggregateCategory?.total + res.price || res.price, 18 | count: redisAggregateCategory?.count + 1 || 1 19 | }) 20 | ); 21 | redis.set( 22 | `${currentDate}_total`, 23 | JSON.stringify({ 24 | total: redisAggregate?.total + res.price || res.price, 25 | count: redisAggregate?.count + 1 || 1 26 | }) 27 | ); 28 | }; 29 | 30 | export const publishMessage = async (args, res) => { 31 | pubsub.publish(SUBSCRIPTION_TOPICS.NEW_PURCHASED_PRODUCT, { 32 | newPurchasedProduct: { 33 | productId: res.productId, 34 | deliveryDate: res.deliveryDate, 35 | 
price: res.price, 36 | storeId: res.storeId 37 | } 38 | }); 39 | }; 40 | export default async (model, args, context) => { 41 | try { 42 | const res = await insertPurchasedProducts(args); 43 | updateRedis(res); 44 | publishMessage(res, args); 45 | return res; 46 | } catch (err) { 47 | throw transformSQLError(err); 48 | } 49 | }; 50 | -------------------------------------------------------------------------------- /server/gql/models/purchasedProducts/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLID, GraphQLInt, GraphQLNonNull, GraphQLObjectType } from 'graphql'; 2 | import { createConnection } from 'graphql-sequelize'; 3 | import { productLists } from '../products'; 4 | import { timestamps } from '@gql/fields/timestamps'; 5 | import { GraphQLDateTime } from 'graphql-iso-date'; 6 | import { getNode } from '@gql/node'; 7 | import db from '@database/models'; 8 | import { totalConnectionFields, listResolver, baseListResolver } from '@utils/index'; 9 | import { sequelizedWhere } from '@database/dbUtils'; 10 | import customCreateResolver from './customCreateResolver'; 11 | import { getQueryFields, CREATE_AND_QUERY_REQUIRED_ARGS, TYPE_ATTRIBUTES } from '@utils/gqlFieldUtils'; 12 | const { nodeInterface } = getNode(); 13 | 14 | export const purchasedProductFields = { 15 | id: { type: GraphQLID, ...CREATE_AND_QUERY_REQUIRED_ARGS }, 16 | price: { type: GraphQLInt, ...CREATE_AND_QUERY_REQUIRED_ARGS }, 17 | discount: { type: GraphQLInt, ...CREATE_AND_QUERY_REQUIRED_ARGS }, 18 | deliveryDate: { type: GraphQLDateTime, [TYPE_ATTRIBUTES.isUpdateRequired]: true, ...CREATE_AND_QUERY_REQUIRED_ARGS }, 19 | productId: { type: GraphQLID, ...CREATE_AND_QUERY_REQUIRED_ARGS }, 20 | storeId: { type: GraphQLID, ...CREATE_AND_QUERY_REQUIRED_ARGS } 21 | }; 22 | const GraphQLPurchasedProduct = new GraphQLObjectType({ 23 | name: 'PurchasedProduct', 24 | interfaces: [nodeInterface], 25 | fields: () => ({ 26 | 
...getQueryFields(purchasedProductFields, TYPE_ATTRIBUTES.isNonNull), 27 | ...timestamps, 28 | products: { 29 | ...productLists.list, 30 | resolve: (source, args, context, info) => 31 | listResolver(productLists, source, args, { ...context, purchasedProduct: source.dataValues }, info) 32 | } 33 | }) 34 | }); 35 | 36 | const PurchasedProductConnection = createConnection({ 37 | name: 'purchasedProducts', 38 | target: db.purchasedProducts, 39 | before: (findOptions, args, context) => { 40 | findOptions.include = findOptions.include || []; 41 | findOptions.where = sequelizedWhere(findOptions.where, args.where); 42 | return findOptions; 43 | }, 44 | nodeType: GraphQLPurchasedProduct, 45 | ...totalConnectionFields 46 | }); 47 | 48 | // queries on the purchasedProducts table 49 | export const purchasedProductQueries = { 50 | args: { 51 | id: { 52 | type: new GraphQLNonNull(GraphQLInt) 53 | } 54 | }, 55 | query: { 56 | type: GraphQLPurchasedProduct 57 | }, 58 | model: db.purchasedProducts 59 | }; 60 | 61 | // lists on the purchasedProducts table 62 | export const purchasedProductLists = { 63 | list: { 64 | ...PurchasedProductConnection, 65 | resolve: (...args) => baseListResolver(PurchasedProductConnection, ...args), 66 | type: PurchasedProductConnection.connectionType, 67 | args: PurchasedProductConnection.connectionArgs 68 | }, 69 | model: db.purchasedProducts 70 | }; 71 | 72 | export const purchasedProductMutations = { 73 | args: purchasedProductFields, 74 | type: GraphQLPurchasedProduct, 75 | model: db.purchasedProducts, 76 | customCreateResolver 77 | }; 78 | -------------------------------------------------------------------------------- /server/gql/models/purchasedProducts/tests/customCreateResolver.test.js: -------------------------------------------------------------------------------- 1 | import { getResponse } from '@utils/testUtils'; 2 | import { updateRedis } from '@gql/models/purchasedProducts/customCreateResolver'; 3 | import { redis } from '@services/redis'; 
4 | import moment from 'moment'; 5 | 6 | describe('custom resolver tests', () => { 7 | const createQuery = ` 8 | mutation { 9 | createPurchasedProduct ( 10 | price: 123321, 11 | discount: 121, 12 | productId: 1876, 13 | deliveryDate: "2016-07-20T17:30:15+05:30", 14 | storeId: 1 15 | ) { 16 | id 17 | price 18 | } 19 | } 20 | `; 21 | const res = { 22 | discount: 121, 23 | deliveryDate: '2016-07-20T12:00:15.000Z', 24 | productId: 1, 25 | id: 1, 26 | price: 123, 27 | storeId: 1 28 | }; 29 | it('should should set the values of the response in redis store', async () => { 30 | const spy = jest.spyOn(redis, 'set'); 31 | await updateRedis(res); 32 | expect(spy).toBeCalledTimes(2); 33 | expect(spy.mock.calls[1]).toMatchObject([`${moment().format('YYYY-MM-DD')}_total`, '{"total":123,"count":1}']); 34 | }); 35 | it('should create a new row in purchased Products', async () => { 36 | const customCreateResolver = require(`../customCreateResolver`); 37 | const spy = jest.spyOn(customCreateResolver, 'default'); 38 | await getResponse(createQuery); 39 | expect(spy).toBeCalled(); 40 | }); 41 | it('should throw custom error when there is error in creatingPurchasedProducts ', async () => { 42 | const purchasedProducts = require('@daos/purchasedProducts'); 43 | const utils = require('@utils'); 44 | jest.spyOn(purchasedProducts, 'insertPurchasedProducts').mockImplementation(() => { 45 | throw new Error(); 46 | }); 47 | const throwSpy = jest.spyOn(utils, 'transformSQLError'); 48 | await getResponse(createQuery); 49 | expect(throwSpy).toBeCalled(); 50 | }); 51 | }); 52 | -------------------------------------------------------------------------------- /server/gql/models/storeProducts/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLID, GraphQLInt, GraphQLNonNull, GraphQLObjectType } from 'graphql'; 2 | import { createConnection } from 'graphql-sequelize'; 3 | import { productLists } from '../products'; 4 | import { storeLists } 
from '../stores'; 5 | import { timestamps } from '@gql/fields/timestamps'; 6 | import { getNode } from '@gql/node'; 7 | import db from '@database/models'; 8 | import { totalConnectionFields, listResolver, baseListResolver } from '@utils/index'; 9 | import { sequelizedWhere } from '@database/dbUtils'; 10 | import { getQueryFields, TYPE_ATTRIBUTES } from '@server/utils/gqlFieldUtils'; 11 | 12 | const { nodeInterface } = getNode(); 13 | 14 | export const storeProductFields = { 15 | id: { type: new GraphQLNonNull(GraphQLID) }, 16 | productId: { type: GraphQLInt }, 17 | storeId: { type: GraphQLInt } 18 | }; 19 | export const GraphQLStoreProduct = new GraphQLObjectType({ 20 | name: 'StoreProduct', 21 | interfaces: [nodeInterface], 22 | fields: () => ({ 23 | ...getQueryFields(storeProductFields, TYPE_ATTRIBUTES.isNonNull), 24 | ...timestamps, 25 | products: { 26 | ...productLists.list, 27 | resolve: (source, args, context, info) => 28 | listResolver(productLists, source, args, { ...context, storeProduct: source.dataValues }, info) 29 | }, 30 | stores: { 31 | ...storeLists.list, 32 | resolve: (source, args, context, info) => 33 | listResolver(storeLists, source, args, { ...context, storeProduct: source.dataValues }, info) 34 | } 35 | }) 36 | }); 37 | 38 | export const StoreProductConnection = createConnection({ 39 | nodeType: GraphQLStoreProduct, 40 | name: 'storeProducts', 41 | target: db.storeProducts, 42 | before: (findOptions, args, context) => { 43 | findOptions.include = findOptions.include || []; 44 | findOptions.where = sequelizedWhere(findOptions.where, args.where); 45 | return findOptions; 46 | }, 47 | ...totalConnectionFields 48 | }); 49 | 50 | // queries on the storeProducts table 51 | export const storeProductQueries = { 52 | args: { 53 | id: { 54 | type: new GraphQLNonNull(GraphQLInt) 55 | } 56 | }, 57 | query: { 58 | type: GraphQLStoreProduct 59 | }, 60 | model: db.storeProducts 61 | }; 62 | 63 | // lists on the storeProducts table 64 | export const 
storeProductLists = { 65 | list: { 66 | ...StoreProductConnection, 67 | resolve: (...args) => baseListResolver(StoreProductConnection, ...args), 68 | type: StoreProductConnection.connectionType, 69 | args: StoreProductConnection.connectionArgs 70 | }, 71 | model: db.storeProducts 72 | }; 73 | 74 | export const storeProductMutations = { 75 | args: storeProductFields, 76 | type: GraphQLStoreProduct, 77 | model: db.storeProducts 78 | }; 79 | -------------------------------------------------------------------------------- /server/gql/models/stores/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLID, GraphQLInt, GraphQLNonNull, GraphQLObjectType, GraphQLString } from 'graphql'; 2 | import { createConnection } from 'graphql-sequelize'; 3 | import { productLists } from '../products'; 4 | import { addressLists } from '../addresses'; 5 | import { timestamps } from '@gql/fields/timestamps'; 6 | import { getNode } from '@gql/node'; 7 | import db from '@database/models'; 8 | import { totalConnectionFields, listResolver, baseListResolver } from '@utils/index'; 9 | import { sequelizedWhere } from '@database/dbUtils'; 10 | import { getQueryFields, TYPE_ATTRIBUTES } from '@server/utils/gqlFieldUtils'; 11 | 12 | const { nodeInterface } = getNode(); 13 | 14 | export const storeFields = { 15 | id: { type: new GraphQLNonNull(GraphQLID) }, 16 | name: { type: GraphQLString }, 17 | addressId: { type: new GraphQLNonNull(GraphQLInt) } 18 | }; 19 | 20 | export const GraphQLStore = new GraphQLObjectType({ 21 | name: 'Store', 22 | interfaces: [nodeInterface], 23 | fields: () => ({ 24 | ...getQueryFields(storeFields, TYPE_ATTRIBUTES.isNonNull), 25 | ...timestamps, 26 | addresses: { 27 | ...addressLists.list, 28 | resolve: (source, args, context, info) => 29 | listResolver(addressLists, source, args, { ...context, store: source.dataValues }, info) 30 | }, 31 | products: { 32 | ...productLists.list, 33 | resolve: (source, args, 
context, info) => 34 | listResolver(productLists, source, args, { ...context, store: source.dataValues }, info) 35 | } 36 | }) 37 | }); 38 | 39 | export const StoreConnection = createConnection({ 40 | nodeType: GraphQLStore, 41 | name: 'store', 42 | target: db.stores, 43 | before: (findOptions, args, context) => { 44 | findOptions.include = findOptions.include || []; 45 | if (context?.product?.id) { 46 | findOptions.include.push({ 47 | model: db.storeProducts, 48 | where: { 49 | productId: context.product.id 50 | } 51 | }); 52 | } 53 | if (context?.address?.id) { 54 | findOptions.include.push({ 55 | model: db.addresses, 56 | where: { 57 | id: context.address.id 58 | } 59 | }); 60 | } 61 | 62 | if (context?.storeProduct?.storeId) { 63 | findOptions.include.push({ 64 | model: db.storeProducts, 65 | where: { 66 | storeId: context.storeProduct.storeId 67 | } 68 | }); 69 | } 70 | findOptions.where = sequelizedWhere(findOptions.where, args.where); 71 | return findOptions; 72 | }, 73 | ...totalConnectionFields 74 | }); 75 | 76 | // queries on the suppliers table 77 | export const storeQueries = { 78 | args: { 79 | id: { 80 | type: new GraphQLNonNull(GraphQLID) 81 | } 82 | }, 83 | query: { 84 | type: GraphQLStore 85 | }, 86 | model: db.stores 87 | }; 88 | 89 | // queries on the suppliers table 90 | export const storeLists = { 91 | list: { 92 | ...StoreConnection, 93 | resolve: (...args) => baseListResolver(StoreConnection, ...args), 94 | type: StoreConnection.connectionType, 95 | args: StoreConnection.connectionArgs 96 | }, 97 | model: db.stores 98 | }; 99 | 100 | export const storeMutations = { 101 | args: storeFields, 102 | type: GraphQLStore, 103 | model: db.stores 104 | }; 105 | -------------------------------------------------------------------------------- /server/gql/models/supplierProducts/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLID, GraphQLInt, GraphQLNonNull, GraphQLObjectType } from 'graphql'; 
2 | import { connectionArgs } from 'graphql-relay'; 3 | import { createConnection } from 'graphql-sequelize'; 4 | import { supplierLists } from '../suppliers'; 5 | import { timestamps } from '@gql/fields/timestamps'; 6 | import { getNode } from '@gql/node'; 7 | import db from '@database/models'; 8 | import { productLists } from '@gql/models/products'; 9 | import { totalConnectionFields, listResolver, baseListResolver } from '@utils/index'; 10 | import { sequelizedWhere } from '@database/dbUtils'; 11 | import { getQueryFields, TYPE_ATTRIBUTES } from '@server/utils/gqlFieldUtils'; 12 | 13 | const { nodeInterface } = getNode(); 14 | 15 | export const supplierProductFields = { 16 | id: { type: new GraphQLNonNull(GraphQLID) }, 17 | supplierId: { type: GraphQLInt }, 18 | productId: { type: GraphQLInt } 19 | }; 20 | export const GraphQLSupplierProduct = new GraphQLObjectType({ 21 | name: 'SupplierProduct', 22 | interfaces: [nodeInterface], 23 | args: connectionArgs, 24 | fields: () => ({ 25 | ...getQueryFields(supplierProductFields, TYPE_ATTRIBUTES.isNonNull), 26 | ...timestamps, 27 | products: { 28 | ...productLists.list, 29 | resolve: (source, args, context, info) => 30 | listResolver(productLists, source, args, { ...context, supplierProduct: source.dataValues }, info) 31 | }, 32 | suppliers: { 33 | ...supplierLists.list, 34 | resolve: (source, args, context, info) => 35 | listResolver(supplierLists, source, args, { ...context, supplierProduct: source.dataValues }, info) 36 | } 37 | }) 38 | }); 39 | 40 | export const SupplierProductConnection = createConnection({ 41 | nodeType: GraphQLSupplierProduct, 42 | name: 'supplierProducts', 43 | target: db.supplierProducts, 44 | before: (findOptions, args, context) => { 45 | findOptions.include = findOptions.include || []; 46 | findOptions.where = sequelizedWhere(findOptions.where, args.where); 47 | return findOptions; 48 | }, 49 | ...totalConnectionFields 50 | }); 51 | 52 | // queries on the product table 53 | export const 
supplierProductQueries = { 54 | args: { 55 | id: { 56 | type: new GraphQLNonNull(GraphQLInt) 57 | } 58 | }, 59 | query: { 60 | type: GraphQLSupplierProduct 61 | }, 62 | model: db.supplierProducts 63 | }; 64 | 65 | // lists on the product table 66 | export const supplierProductLists = { 67 | list: { 68 | ...SupplierProductConnection, 69 | resolve: (...args) => baseListResolver(SupplierProductConnection, ...args), 70 | type: SupplierProductConnection.connectionType, 71 | args: SupplierProductConnection.connectionArgs 72 | }, 73 | model: db.supplierProducts 74 | }; 75 | 76 | export const supplierProductMutations = { 77 | args: supplierProductFields, 78 | type: GraphQLSupplierProduct, 79 | model: db.supplierProducts 80 | }; 81 | -------------------------------------------------------------------------------- /server/gql/models/suppliers/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLID, GraphQLInt, GraphQLNonNull, GraphQLObjectType, GraphQLString } from 'graphql'; 2 | import { createConnection } from 'graphql-sequelize'; 3 | import { productLists } from '../products'; 4 | import { timestamps } from '@gql/fields/timestamps'; 5 | import { getNode } from '@gql/node'; 6 | import db from '@database/models'; 7 | import { addressLists } from '@gql/models/addresses'; 8 | import { totalConnectionFields, listResolver, baseListResolver } from '@utils/index'; 9 | import { sequelizedWhere } from '@database/dbUtils'; 10 | import { getQueryFields, TYPE_ATTRIBUTES } from '@server/utils/gqlFieldUtils'; 11 | 12 | const { nodeInterface } = getNode(); 13 | 14 | export const supplierFields = { 15 | id: { type: new GraphQLNonNull(GraphQLID) }, 16 | name: { type: GraphQLString }, 17 | addressId: { type: GraphQLInt } 18 | }; 19 | const GraphQLSupplier = new GraphQLObjectType({ 20 | name: 'Supplier', 21 | interfaces: [nodeInterface], 22 | 23 | sqlPaginate: true, 24 | fields: () => ({ 25 | ...getQueryFields(supplierFields, 
TYPE_ATTRIBUTES.isNonNull), 26 | ...timestamps, 27 | addresses: { 28 | ...addressLists.list, 29 | resolve: (source, args, context, info) => 30 | listResolver(addressLists, source, args, { ...context, supplier: source.dataValues }, info) 31 | }, 32 | products: { 33 | ...productLists.list, 34 | resolve: (source, args, context, info) => 35 | listResolver(productLists, source, args, { ...context, supplier: source.dataValues }, info) 36 | } 37 | }) 38 | }); 39 | 40 | export const SupplierConnection = createConnection({ 41 | nodeType: GraphQLSupplier, 42 | name: 'suppliers', 43 | target: db.suppliers, 44 | before: (findOptions, args, context) => { 45 | findOptions.include = findOptions.include || []; 46 | if (context?.product?.id) { 47 | findOptions.include.push({ 48 | model: db.supplierProducts, 49 | where: { 50 | productId: context.product.id 51 | } 52 | }); 53 | } 54 | 55 | if (context?.address?.id) { 56 | findOptions.include.push({ 57 | model: db.addresses, 58 | where: { 59 | id: context.address.id 60 | } 61 | }); 62 | } 63 | 64 | if (context?.supplierProduct?.supplierId) { 65 | findOptions.include.push({ 66 | model: db.supplierProducts, 67 | where: { 68 | supplierId: context.supplierProduct.supplierId 69 | } 70 | }); 71 | } 72 | findOptions.where = sequelizedWhere(findOptions.where, args.where); 73 | return findOptions; 74 | }, 75 | ...totalConnectionFields 76 | }); 77 | 78 | // queries on the suppliers table 79 | export const supplierQueries = { 80 | args: { 81 | id: { 82 | type: new GraphQLNonNull(GraphQLInt) 83 | } 84 | }, 85 | query: { 86 | type: GraphQLSupplier 87 | }, 88 | model: db.suppliers 89 | }; 90 | 91 | // lists on the suppliers table 92 | export const supplierLists = { 93 | list: { 94 | ...SupplierConnection, 95 | resolve: (...args) => baseListResolver(SupplierConnection, ...args), 96 | type: SupplierConnection.connectionType, 97 | args: SupplierConnection.connectionArgs 98 | }, 99 | model: db.suppliers 100 | }; 101 | 102 | export const 
supplierMutations = { 103 | args: supplierFields, 104 | type: GraphQLSupplier, 105 | model: db.suppliers 106 | }; 107 | -------------------------------------------------------------------------------- /server/gql/models/tests/addresses/addresses.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { graphqlSync, GraphQLSchema } from 'graphql'; 3 | import { createFieldsWithType, expectSameTypeNameOrKind } from '@utils/testUtils'; 4 | import { QueryRoot } from '../../../queries'; 5 | import { MutationRoot } from '../../../mutations'; 6 | import { addressFields } from '@gql/models/addresses'; 7 | import { timestamps } from '@gql/fields/timestamps'; 8 | 9 | const schema = new GraphQLSchema({ query: QueryRoot, mutation: MutationRoot }); 10 | 11 | let fields = []; 12 | 13 | fields = createFieldsWithType({ ...addressFields, ...timestamps }); 14 | 15 | const query = ` 16 | { 17 | __type(name: "Address") { 18 | name 19 | kind 20 | fields { 21 | name 22 | type { 23 | name 24 | kind 25 | } 26 | } 27 | } 28 | } 29 | `; 30 | describe('Address introspection tests', () => { 31 | it('should have the correct fields and types', async () => { 32 | const result = await graphqlSync({ schema, source: query }); 33 | const addressFieldTypes = get(result, 'data.__type.fields'); 34 | const hasCorrectFieldTypes = expectSameTypeNameOrKind(addressFieldTypes, fields); 35 | expect(hasCorrectFieldTypes).toBeTruthy(); 36 | }); 37 | it('should have a supplier connection', async () => { 38 | const result = await graphqlSync({ schema, source: query }); 39 | const addressFieldTypes = get(result, 'data.__type.fields'); 40 | const supplierField = addressFieldTypes.find(field => field.name === 'suppliers'); 41 | expect(supplierField.type.kind).toBe('OBJECT'); 42 | expect(supplierField.type.name).toBe('suppliersConnection'); 43 | }); 44 | it('should have a store connection', async () => { 45 | const result = await 
graphqlSync({ schema, source: query }); 46 | const addressFieldTypes = get(result, 'data.__type.fields'); 47 | const storeField = addressFieldTypes.find(field => field.name === 'stores'); 48 | expect(storeField.type.kind).toBe('OBJECT'); 49 | expect(storeField.type.name).toBe('storeConnection'); 50 | }); 51 | }); 52 | -------------------------------------------------------------------------------- /server/gql/models/tests/addresses/pagination.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { getResponse, resetAndMockDB } from '@server/utils/testUtils'; 3 | import { addressesTable } from '@server/utils/testUtils/mockData'; 4 | 5 | describe('Address graphQL-server-DB mutation tests', () => { 6 | const addressesQuery = ` 7 | query { 8 | addresses (first: 1, limit: 1, offset: 0){ 9 | edges { 10 | node { 11 | id 12 | address1 13 | address2 14 | stores (first: 10) { 15 | edges { 16 | node { 17 | id 18 | } 19 | } 20 | } 21 | suppliers (first: 10) { 22 | edges { 23 | node { 24 | id 25 | } 26 | } 27 | } 28 | } 29 | } 30 | pageInfo { 31 | hasNextPage 32 | hasPreviousPage 33 | startCursor 34 | endCursor 35 | } 36 | total 37 | } 38 | } 39 | `; 40 | 41 | it('should have a query to get the addresses', async () => { 42 | resetAndMockDB(null, {}); 43 | await getResponse(addressesQuery).then(response => { 44 | const result = get(response, 'body.data.addresses.edges[0].node'); 45 | expect(result).toEqual( 46 | expect.objectContaining({ 47 | id: addressesTable[0].id, 48 | address1: addressesTable[0].address1, 49 | address2: addressesTable[0].address2 50 | }) 51 | ); 52 | }); 53 | }); 54 | it('should have the correct pageInfo', async () => { 55 | await getResponse(addressesQuery).then(response => { 56 | const result = get(response, 'body.data.addresses.pageInfo'); 57 | expect(result).toEqual( 58 | expect.objectContaining({ 59 | hasNextPage: true, 60 | hasPreviousPage: false 61 | }) 62 | ); 63 | }); 
64 | }); 65 | }); 66 | -------------------------------------------------------------------------------- /server/gql/models/tests/addresses/query.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { getResponse, mockDBClient, resetAndMockDB } from '@utils/testUtils'; 3 | import { storesTable, suppliersTable } from '@utils/testUtils/mockData'; 4 | 5 | describe('Address graphQL-server-DB query tests', () => { 6 | const id = 1; 7 | const addressOne = ` 8 | query { 9 | address (id: ${id}) { 10 | id 11 | address1 12 | stores (first: 10) { 13 | edges { 14 | node { 15 | id 16 | } 17 | } 18 | } 19 | suppliers (first: 10) { 20 | edges { 21 | node { 22 | id 23 | } 24 | } 25 | } 26 | } 27 | } 28 | `; 29 | 30 | it('should request for suppliers and stores related to the address', async () => { 31 | const dbClient = mockDBClient(); 32 | resetAndMockDB(null, {}, dbClient); 33 | 34 | jest.spyOn(dbClient.models.suppliers, 'findAll').mockImplementation(() => [suppliersTable[0]]); 35 | 36 | jest.spyOn(dbClient.models.stores, 'findAll').mockImplementation(() => [storesTable[0]]); 37 | 38 | await getResponse(addressOne).then(response => { 39 | expect(get(response, 'body.data.address')).toBeTruthy(); 40 | // check if suppliers.findAll is being called once 41 | expect(dbClient.models.suppliers.findAll.mock.calls.length).toBe(1); 42 | // check if suppliers.findAll is being called with the correct whereclause 43 | expect(dbClient.models.suppliers.findAll.mock.calls[0][0].include[0].where).toEqual({ id }); 44 | // check if the included model has name: addresses 45 | expect(dbClient.models.suppliers.findAll.mock.calls[0][0].include[0].model.name).toEqual('addresses'); 46 | 47 | expect(dbClient.models.stores.findAll.mock.calls.length).toBe(1); 48 | expect(dbClient.models.stores.findAll.mock.calls[0][0].include[0].where).toEqual({ id }); 49 | 
expect(dbClient.models.stores.findAll.mock.calls[0][0].include[0].model.name).toEqual('addresses'); 50 | }); 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /server/gql/models/tests/products/mutation.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { getResponse } from '@utils/testUtils'; 3 | 4 | describe('Product graphQL-server-DB mutation tests', () => { 5 | const createProductMut = ` 6 | mutation { 7 | createProduct ( 8 | name: "New produce" 9 | amount: 10 10 | ) { 11 | id 12 | name 13 | amount 14 | createdAt 15 | updatedAt 16 | deletedAt 17 | suppliers (first: 10) { 18 | edges { 19 | node { 20 | id 21 | } 22 | } 23 | } 24 | stores (first: 10) { 25 | edges { 26 | node { 27 | id 28 | } 29 | } 30 | } 31 | } 32 | } 33 | `; 34 | 35 | it('should have a mutation to create a new product', async () => { 36 | await getResponse(createProductMut).then(response => { 37 | const result = get(response, 'body.data.createProduct'); 38 | expect(result).toMatchObject({ 39 | id: '1', 40 | name: 'New produce', 41 | amount: 10 42 | }); 43 | }); 44 | }); 45 | 46 | const deleteProductMut = ` 47 | mutation { 48 | deleteProduct ( 49 | id: 1 50 | ) { 51 | id 52 | } 53 | } 54 | `; 55 | 56 | it('should have a mutation to delete a product', async () => { 57 | await getResponse(deleteProductMut).then(response => { 58 | const result = get(response, 'body.data.deleteProduct'); 59 | expect(result).toEqual( 60 | expect.objectContaining({ 61 | id: 1 62 | }) 63 | ); 64 | }); 65 | }); 66 | }); 67 | -------------------------------------------------------------------------------- /server/gql/models/tests/products/pagination.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { productsTable } from '@server/utils/testUtils/mockData'; 3 | import { getResponse } from 
'@utils/testUtils'; 4 | 5 | describe('Products graphQL-server-DB pagination tests', () => { 6 | const productsQuery = ` 7 | query { 8 | products (first: 1, limit: 1, offset: 0){ 9 | edges { 10 | node { 11 | id 12 | name 13 | category 14 | amount 15 | } 16 | } 17 | pageInfo { 18 | hasNextPage 19 | hasPreviousPage 20 | startCursor 21 | endCursor 22 | } 23 | total 24 | } 25 | } 26 | `; 27 | 28 | it('should have a query to get the products', async () => { 29 | await getResponse(productsQuery).then(response => { 30 | const result = get(response, 'body.data.products.edges[0].node'); 31 | expect(result).toEqual( 32 | expect.objectContaining({ 33 | id: productsTable[0].id, 34 | name: productsTable[0].name, 35 | category: productsTable[0].category, 36 | amount: Math.floor(productsTable[0].amount) 37 | }) 38 | ); 39 | }); 40 | }); 41 | 42 | it('should have the correct pageInfo', async () => { 43 | await getResponse(productsQuery).then(response => { 44 | const result = get(response, 'body.data.products.pageInfo'); 45 | expect(result).toEqual( 46 | expect.objectContaining({ 47 | hasNextPage: true, 48 | hasPreviousPage: false 49 | }) 50 | ); 51 | }); 52 | }); 53 | }); 54 | -------------------------------------------------------------------------------- /server/gql/models/tests/products/products.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { graphqlSync, GraphQLSchema } from 'graphql'; 3 | import { createFieldsWithType, expectSameTypeNameOrKind } from '@utils/testUtils'; 4 | import { QueryRoot } from '../../../queries'; 5 | import { MutationRoot } from '../../../mutations'; 6 | import { timestamps } from '@gql/fields/timestamps'; 7 | import { productFields } from '@gql/models/products'; 8 | 9 | const schema = new GraphQLSchema({ query: QueryRoot, mutation: MutationRoot }); 10 | 11 | let fields = []; 12 | 13 | fields = createFieldsWithType({ ...productFields, ...timestamps }); 14 | 15 | 
const query = ` 16 | { 17 | __type(name: "Product") { 18 | name 19 | kind 20 | fields { 21 | name 22 | type { 23 | name 24 | kind 25 | } 26 | } 27 | } 28 | } 29 | `; 30 | describe('Product introspection tests', () => { 31 | it('should have the correct fields and types', async () => { 32 | const result = await graphqlSync({ schema, source: query }); 33 | const productFieldTypes = get(result, 'data.__type.fields'); 34 | const hasCorrectFieldTypes = expectSameTypeNameOrKind(productFieldTypes, fields); 35 | expect(hasCorrectFieldTypes).toBeTruthy(); 36 | }); 37 | it('should have a supplier connection', async () => { 38 | const result = await graphqlSync({ schema, source: query }); 39 | const addressFieldTypes = get(result, 'data.__type.fields'); 40 | const supplierField = addressFieldTypes.find(field => field.name === 'suppliers'); 41 | expect(supplierField.type.kind).toBe('OBJECT'); 42 | expect(supplierField.type.name).toBe('suppliersConnection'); 43 | }); 44 | it('should have a store connection', async () => { 45 | const result = await graphqlSync({ schema, source: query }); 46 | const addressFieldTypes = get(result, 'data.__type.fields'); 47 | const storeField = addressFieldTypes.find(field => field.name === 'stores'); 48 | expect(storeField.type.kind).toBe('OBJECT'); 49 | expect(storeField.type.name).toBe('storeConnection'); 50 | }); 51 | }); 52 | -------------------------------------------------------------------------------- /server/gql/models/tests/products/query.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { storesTable, suppliersTable } from '@server/utils/testUtils/mockData'; 3 | import { getResponse, mockDBClient, resetAndMockDB } from '@utils/testUtils'; 4 | 5 | describe('Product graphQL-server-DB query tests', () => { 6 | const id = 1; 7 | const productQuery = ` 8 | query { 9 | product (id: ${id}) { 10 | id 11 | name 12 | suppliers (first: 10) { 13 | edges { 14 | node 
{ 15 | id 16 | } 17 | } 18 | } 19 | stores (first: 10) { 20 | edges { 21 | node { 22 | id 23 | } 24 | } 25 | } 26 | } 27 | } 28 | `; 29 | it('should request for suppliers and stores related to the product', async () => { 30 | const dbClient = mockDBClient(); 31 | resetAndMockDB(null, {}, dbClient); 32 | 33 | jest.spyOn(dbClient.models.suppliers, 'findAll').mockImplementation(() => [suppliersTable[0]]); 34 | 35 | jest.spyOn(dbClient.models.stores, 'findAll').mockImplementation(() => [storesTable[0]]); 36 | 37 | await getResponse(productQuery).then(response => { 38 | expect(get(response, 'body.data.product')).toBeTruthy(); 39 | // check if suppliers.findAll is being called once 40 | expect(dbClient.models.suppliers.findAll.mock.calls.length).toBe(1); 41 | // check if suppliers.findAll is being called with the correct whereclause 42 | expect(dbClient.models.suppliers.findAll.mock.calls[0][0].include[0].where).toEqual({ productId: id }); 43 | // check if the included model has name: supplier_products 44 | expect(dbClient.models.suppliers.findAll.mock.calls[0][0].include[0].model.name).toEqual('supplier_products'); 45 | 46 | expect(dbClient.models.stores.findAll.mock.calls.length).toBe(1); 47 | expect(dbClient.models.stores.findAll.mock.calls[0][0].include[0].where).toEqual({ productId: id }); 48 | expect(dbClient.models.stores.findAll.mock.calls[0][0].include[0].model.name).toEqual('store_products'); 49 | }); 50 | }); 51 | }); 52 | -------------------------------------------------------------------------------- /server/gql/models/tests/purchasedProducts/mutation.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { getResponse } from '@utils/testUtils'; 3 | 4 | describe('purchased_products graphQL-server-DB mutation tests', () => { 5 | const deletePurchasedProductMut = ` 6 | mutation { 7 | deletePurchasedProduct ( 8 | id: 1 9 | ) { 10 | id 11 | } 12 | } 13 | `; 14 | 15 | it('should have 
a mutation to delete a purchased product', async () => { 16 | await getResponse(deletePurchasedProductMut).then(response => { 17 | const result = get(response, 'body.data.deletePurchasedProduct'); 18 | expect(result).toEqual( 19 | expect.objectContaining({ 20 | id: 1 21 | }) 22 | ); 23 | }); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /server/gql/models/tests/purchasedProducts/pagination.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { purchasedProductsTable } from '@server/utils/testUtils/mockData'; 3 | import { getResponse, resetAndMockDB } from '@utils/testUtils'; 4 | 5 | describe('PurchasedProducts graphQL-server-DB pagination tests', () => { 6 | const purchasedProductsQuery = ` 7 | query { 8 | purchasedProducts (first: 1, limit: 1, offset: 0){ 9 | edges { 10 | node { 11 | id 12 | price 13 | discount 14 | products (first: 10) { 15 | edges { 16 | node { 17 | id 18 | } 19 | } 20 | } 21 | } 22 | } 23 | pageInfo { 24 | hasNextPage 25 | hasPreviousPage 26 | startCursor 27 | endCursor 28 | } 29 | total 30 | } 31 | } 32 | `; 33 | 34 | it('should have a query to get the products', async () => { 35 | resetAndMockDB(); 36 | await getResponse(purchasedProductsQuery).then(response => { 37 | const result = get(response, 'body.data.purchasedProducts.edges[0].node'); 38 | expect(result).toEqual( 39 | expect.objectContaining({ 40 | id: purchasedProductsTable[0].id, 41 | price: Math.floor(purchasedProductsTable[0].price), 42 | discount: Math.floor(purchasedProductsTable[0].discount) 43 | }) 44 | ); 45 | }); 46 | }); 47 | 48 | it('should have the correct pageInfo', async () => { 49 | resetAndMockDB(); 50 | await getResponse(purchasedProductsQuery).then(response => { 51 | const result = get(response, 'body.data.purchasedProducts.pageInfo'); 52 | expect(result).toEqual( 53 | expect.objectContaining({ 54 | hasNextPage: true, 55 | 
hasPreviousPage: false 56 | }) 57 | ); 58 | }); 59 | }); 60 | }); 61 | -------------------------------------------------------------------------------- /server/gql/models/tests/purchasedProducts/purchasedProducts.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { graphqlSync, GraphQLSchema } from 'graphql'; 3 | import { createFieldsWithType, expectSameTypeNameOrKind } from '@utils/testUtils'; 4 | import { QueryRoot } from '../../../queries'; 5 | import { MutationRoot } from '../../../mutations'; 6 | import { timestamps } from '@gql/fields/timestamps'; 7 | import { purchasedProductFields } from '@gql/models/purchasedProducts'; 8 | 9 | const schema = new GraphQLSchema({ query: QueryRoot, mutation: MutationRoot }); 10 | 11 | let fields = []; 12 | 13 | fields = createFieldsWithType({ ...purchasedProductFields, ...timestamps }); 14 | 15 | const query = ` 16 | { 17 | __type(name: "PurchasedProduct") { 18 | name 19 | kind 20 | fields { 21 | name 22 | type { 23 | name 24 | kind 25 | } 26 | } 27 | } 28 | } 29 | `; 30 | describe('Purchased Product introspection tests', () => { 31 | it('should have the correct fields and types', async () => { 32 | const result = await graphqlSync({ schema, source: query }); 33 | const purchasedProductFieldTypes = get(result, 'data.__type.fields'); 34 | const hasCorrectFieldTypes = expectSameTypeNameOrKind(purchasedProductFieldTypes, fields); 35 | expect(hasCorrectFieldTypes).toBeTruthy(); 36 | }); 37 | 38 | it('should have a product field of type Product', async () => { 39 | const result = await graphqlSync({ schema, source: query }); 40 | const purchasedProductFieldTypes = get(result, 'data.__type.fields'); 41 | const productField = purchasedProductFieldTypes.find(field => field.name === 'products'); 42 | expect(productField.type.name).toBe('productsConnection'); 43 | expect(productField.type.kind).toBe('OBJECT'); 44 | }); 45 | }); 46 | 
-------------------------------------------------------------------------------- /server/gql/models/tests/purchasedProducts/query.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { getResponse, mockDBClient, resetAndMockDB } from '@utils/testUtils'; 3 | import { productsTable } from '@utils/testUtils/mockData'; 4 | import { MAX_PAGE_SIZE } from '@server/utils/constants'; 5 | 6 | describe('purchased_product graphQL-server-DB query tests', () => { 7 | const id = 1; 8 | const purchasedProductPrice = ` 9 | query { 10 | purchasedProduct (id: ${id}) { 11 | id 12 | price 13 | products (first: ${MAX_PAGE_SIZE}) { 14 | edges { 15 | node { 16 | id 17 | } 18 | } 19 | } 20 | } 21 | } 22 | `; 23 | 24 | it('should request for products related to the purchasedProducts', async () => { 25 | const dbClient = mockDBClient(); 26 | resetAndMockDB(null, {}, dbClient); 27 | 28 | jest.spyOn(dbClient.models.products, 'findAll').mockImplementation(() => [productsTable[0]]); 29 | 30 | await getResponse(purchasedProductPrice).then(response => { 31 | expect(get(response, 'body.data.purchasedProduct')).toBeTruthy(); 32 | 33 | // check if products.findAll is being called once 34 | expect(dbClient.models.products.findAll.mock.calls.length).toBe(1); 35 | // check if products.findAll is being called with the correct whereclause 36 | expect(dbClient.models.products.findAll.mock.calls[0][0].include[0].where).toEqual({ id }); 37 | // check if the included model has name: purchased_products 38 | expect(dbClient.models.products.findAll.mock.calls[0][0].include[0].model.name).toEqual('purchased_products'); 39 | }); 40 | }); 41 | }); 42 | -------------------------------------------------------------------------------- /server/gql/models/tests/storeProducts/mutation.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { getResponse } from 
'@utils/testUtils'; 3 | 4 | describe('store_products graphQL-server-DB mutation tests', () => { 5 | const createStoreProductMut = ` 6 | mutation { 7 | createStoreProduct ( 8 | productId: 1 9 | storeId: 1 10 | ) { 11 | id 12 | productId 13 | storeId 14 | createdAt 15 | updatedAt 16 | deletedAt 17 | } 18 | } 19 | `; 20 | 21 | it('should have a mutation to create a new store product', async () => { 22 | await getResponse(createStoreProductMut).then(response => { 23 | const result = get(response, 'body.data.createStoreProduct'); 24 | expect(result).toMatchObject({ 25 | id: '1', 26 | productId: 1, 27 | storeId: 1 28 | }); 29 | }); 30 | }); 31 | 32 | const deleteStoreProductMut = ` 33 | mutation { 34 | deleteStoreProduct ( 35 | id: 1 36 | ) { 37 | id 38 | } 39 | } 40 | `; 41 | 42 | it('should have a mutation to delete a store product', async () => { 43 | await getResponse(deleteStoreProductMut).then(response => { 44 | const result = get(response, 'body.data.deleteStoreProduct'); 45 | expect(result).toEqual( 46 | expect.objectContaining({ 47 | id: 1 48 | }) 49 | ); 50 | }); 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /server/gql/models/tests/storeProducts/pagination.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { storeProductsTable } from '@server/utils/testUtils/mockData'; 3 | import { getResponse } from '@utils/testUtils'; 4 | 5 | describe('StoresProducts graphQL-server-DB pagination tests', () => { 6 | const storeProductsQuery = ` 7 | query { 8 | storeProducts (first: 1, limit: 1, offset: 0){ 9 | edges { 10 | node { 11 | id 12 | productId 13 | storeId 14 | stores (first: 10) { 15 | edges { 16 | node { 17 | id 18 | } 19 | } 20 | } 21 | products (first: 10) { 22 | edges { 23 | node { 24 | id 25 | } 26 | } 27 | } 28 | } 29 | } 30 | pageInfo { 31 | hasNextPage 32 | hasPreviousPage 33 | startCursor 34 | endCursor 35 | } 36 | 
total 37 | } 38 | } 39 | `; 40 | 41 | it('should have a query to get the storeProducts', async () => { 42 | await getResponse(storeProductsQuery).then(response => { 43 | const result = get(response, 'body.data.storeProducts.edges[0].node'); 44 | expect(result).toEqual( 45 | expect.objectContaining({ 46 | id: storeProductsTable[0].id, 47 | productId: storeProductsTable[0].productId, 48 | storeId: storeProductsTable[0].storeId 49 | }) 50 | ); 51 | }); 52 | }); 53 | 54 | it('should have the correct pageInfo', async () => { 55 | await getResponse(storeProductsQuery).then(response => { 56 | const result = get(response, 'body.data.storeProducts.pageInfo'); 57 | expect(result).toEqual( 58 | expect.objectContaining({ 59 | hasNextPage: true, 60 | hasPreviousPage: false 61 | }) 62 | ); 63 | }); 64 | }); 65 | }); 66 | -------------------------------------------------------------------------------- /server/gql/models/tests/storeProducts/query.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { productsTable, storesTable } from '@server/utils/testUtils/mockData'; 3 | import { getResponse, mockDBClient, resetAndMockDB } from '@utils/testUtils'; 4 | 5 | describe('store_product graphQL-server-DB query tests', () => { 6 | const id = 1; 7 | const storeProductStoreId = ` 8 | query { 9 | storeProduct (id: ${id}) { 10 | id 11 | storeId 12 | stores (first: 10) { 13 | edges { 14 | node { 15 | id 16 | } 17 | } 18 | } 19 | products (first: 10) { 20 | edges { 21 | node { 22 | id 23 | } 24 | } 25 | } 26 | } 27 | } 28 | `; 29 | 30 | it('should request for stores and products related to the storeProducts', async () => { 31 | const dbClient = mockDBClient(); 32 | resetAndMockDB(null, {}, dbClient); 33 | 34 | jest.spyOn(dbClient.models.stores, 'findAll').mockImplementation(() => [storesTable[0]]); 35 | 36 | jest.spyOn(dbClient.models.products, 'findAll').mockImplementation(() => [productsTable[0]]); 37 | 38 | 
await getResponse(storeProductStoreId).then(response => { 39 | expect(get(response, 'body.data.storeProduct')).toBeTruthy(); 40 | 41 | // check if stores.findAll is being called once 42 | expect(dbClient.models.stores.findAll.mock.calls.length).toBe(1); 43 | // check if stores.findAll is being called with the correct whereclause 44 | expect(dbClient.models.stores.findAll.mock.calls[0][0].include[0].where).toEqual({ storeId: id }); 45 | // check if the included model has name: store_products 46 | expect(dbClient.models.stores.findAll.mock.calls[0][0].include[0].model.name).toEqual('store_products'); 47 | 48 | expect(dbClient.models.products.findAll.mock.calls.length).toBe(1); 49 | expect(dbClient.models.products.findAll.mock.calls[0][0].include[0].where).toEqual({ productId: id }); 50 | expect(dbClient.models.products.findAll.mock.calls[0][0].include[0].model.name).toEqual('store_products'); 51 | }); 52 | }); 53 | }); 54 | -------------------------------------------------------------------------------- /server/gql/models/tests/storeProducts/storeProducts.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { graphqlSync, GraphQLSchema } from 'graphql'; 3 | import { createFieldsWithType, expectSameTypeNameOrKind } from '@utils/testUtils'; 4 | import { QueryRoot } from '../../../queries'; 5 | import { MutationRoot } from '../../../mutations'; 6 | import { timestamps } from '@gql/fields/timestamps'; 7 | import { storeProductFields } from '@gql/models/storeProducts'; 8 | 9 | const schema = new GraphQLSchema({ query: QueryRoot, mutation: MutationRoot }); 10 | 11 | let fields = []; 12 | 13 | fields = createFieldsWithType({ ...storeProductFields, ...timestamps }); 14 | 15 | const query = ` 16 | { 17 | __type(name: "StoreProduct") { 18 | name 19 | kind 20 | fields { 21 | name 22 | type { 23 | name 24 | kind 25 | } 26 | } 27 | } 28 | } 29 | `; 30 | describe('Store Product introspection tests', 
() => { 31 | it('should have the correct fields and types', async () => { 32 | const result = await graphqlSync({ schema, source: query }); 33 | const storeProductFieldTypes = get(result, 'data.__type.fields'); 34 | const hasCorrectFieldTypes = expectSameTypeNameOrKind(storeProductFieldTypes, fields); 35 | expect(hasCorrectFieldTypes).toBeTruthy(); 36 | }); 37 | 38 | it('should have a product field of type Product', async () => { 39 | const result = await graphqlSync({ schema, source: query }); 40 | const purchasedProductFieldTypes = get(result, 'data.__type.fields'); 41 | const productField = purchasedProductFieldTypes.find(field => field.name === 'products'); 42 | expect(productField.type.name).toBe('productsConnection'); 43 | expect(productField.type.kind).toBe('OBJECT'); 44 | }); 45 | 46 | it('should have a store field of type Store', async () => { 47 | const result = await graphqlSync({ schema, source: query }); 48 | const purchasedProductFieldTypes = get(result, 'data.__type.fields'); 49 | const productField = purchasedProductFieldTypes.find(field => field.name === 'stores'); 50 | expect(productField.type.name).toBe('storeConnection'); 51 | expect(productField.type.kind).toBe('OBJECT'); 52 | }); 53 | }); 54 | -------------------------------------------------------------------------------- /server/gql/models/tests/stores/mutation.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { getResponse } from '@utils/testUtils'; 3 | 4 | describe('store graphQL-server-DB mutation tests', () => { 5 | const createStoreMut = ` 6 | mutation { 7 | createStore ( 8 | name: "new store name" 9 | addressId: 1 10 | ) { 11 | id 12 | name 13 | addressId 14 | createdAt 15 | updatedAt 16 | deletedAt 17 | } 18 | } 19 | `; 20 | 21 | it('should have a mutation to create a new store', async () => { 22 | await getResponse(createStoreMut).then(response => { 23 | const result = get(response, 
'body.data.createStore'); 24 | expect(result).toMatchObject({ 25 | id: '1', 26 | name: 'new store name', 27 | addressId: 1 28 | }); 29 | }); 30 | }); 31 | 32 | const deleteStoreMut = ` 33 | mutation { 34 | deleteStore ( 35 | id: 1 36 | ) { 37 | id 38 | } 39 | } 40 | `; 41 | 42 | it('should have a mutation to delete a store', async () => { 43 | await getResponse(deleteStoreMut).then(response => { 44 | const result = get(response, 'body.data.deleteStore'); 45 | expect(result).toEqual( 46 | expect.objectContaining({ 47 | id: 1 48 | }) 49 | ); 50 | }); 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /server/gql/models/tests/stores/pagination.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { storesTable } from '@server/utils/testUtils/mockData'; 3 | import { getResponse } from '@utils/testUtils'; 4 | 5 | describe('Stores graphQL-server-DB pagination tests', () => { 6 | const storesQuery = ` 7 | query { 8 | stores (first: 1, limit: 1, offset: 0){ 9 | edges { 10 | node { 11 | id 12 | name 13 | addressId 14 | addresses (first: 10) { 15 | edges { 16 | node { 17 | id 18 | } 19 | } 20 | } 21 | products (first: 10) { 22 | edges { 23 | node { 24 | id 25 | } 26 | } 27 | } 28 | } 29 | } 30 | pageInfo { 31 | hasNextPage 32 | hasPreviousPage 33 | startCursor 34 | endCursor 35 | } 36 | total 37 | } 38 | } 39 | `; 40 | 41 | it('should have a query to get the Stores', async () => { 42 | const mockDBClient = require('@database'); 43 | const client = mockDBClient.client; 44 | client.$queueQueryResult([ 45 | {}, 46 | { 47 | rows: [{ ...storesTable[0], $total: 10 }] 48 | } 49 | ]); 50 | jest.doMock('@database', () => ({ client, getClient: () => client })); 51 | await getResponse(storesQuery).then(response => { 52 | const result = get(response, 'body.data.stores.edges[0].node'); 53 | expect(result).toEqual( 54 | expect.objectContaining({ 55 | id: 
storesTable[0].id, 56 | name: storesTable[0].name, 57 | addressId: storesTable[0].addressId 58 | }) 59 | ); 60 | }); 61 | }); 62 | 63 | it('should have the correct pageInfo', async () => { 64 | const mockDBClient = require('@database'); 65 | const client = mockDBClient.client; 66 | client.$queueQueryResult([ 67 | {}, 68 | { 69 | rows: [{ ...storesTable, $total: 10 }] 70 | } 71 | ]); 72 | jest.doMock('@database', () => ({ client, getClient: () => client })); 73 | await getResponse(storesQuery).then(response => { 74 | const result = get(response, 'body.data.stores.pageInfo'); 75 | expect(result).toEqual( 76 | expect.objectContaining({ 77 | startCursor: 'W251bGwsMF0=', 78 | endCursor: 'W251bGwsMF0=', 79 | hasNextPage: true, 80 | hasPreviousPage: false 81 | }) 82 | ); 83 | }); 84 | }); 85 | }); 86 | -------------------------------------------------------------------------------- /server/gql/models/tests/stores/query.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { productsTable } from '@server/utils/testUtils/mockData'; 3 | import { getResponse, mockDBClient, resetAndMockDB } from '@utils/testUtils'; 4 | 5 | describe('store graphQL-server-DB query tests', () => { 6 | const id = '1'; 7 | const storeName = ` 8 | query { 9 | store (id: ${id}) { 10 | id 11 | name 12 | products (first: 10) { 13 | edges { 14 | node { 15 | id 16 | } 17 | } 18 | } 19 | } 20 | } 21 | `; 22 | 23 | it('should request for products related to the stores', async () => { 24 | const dbClient = mockDBClient(); 25 | resetAndMockDB(null, {}, dbClient); 26 | 27 | jest.spyOn(dbClient.models.products, 'findAll').mockImplementation(() => [productsTable[0]]); 28 | 29 | await getResponse(storeName).then(response => { 30 | expect(get(response, 'body.data.store')).toBeTruthy(); 31 | 32 | expect(dbClient.models.products.findAll.mock.calls.length).toBe(1); 33 | 
expect(dbClient.models.products.findAll.mock.calls[0][0].include[0].where).toEqual({ id }); 34 | expect(dbClient.models.products.findAll.mock.calls[0][0].include[0].model.name).toEqual('stores'); 35 | }); 36 | }); 37 | }); 38 | -------------------------------------------------------------------------------- /server/gql/models/tests/stores/stores.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { graphqlSync, GraphQLSchema } from 'graphql'; 3 | import { createFieldsWithType, expectSameTypeNameOrKind } from '@utils/testUtils'; 4 | import { QueryRoot } from '../../../queries'; 5 | import { MutationRoot } from '../../../mutations'; 6 | import { timestamps } from '@gql/fields/timestamps'; 7 | import { storeFields } from '@gql/models/stores'; 8 | 9 | const schema = new GraphQLSchema({ query: QueryRoot, mutation: MutationRoot }); 10 | 11 | let fields = []; 12 | 13 | fields = createFieldsWithType({ ...storeFields, ...timestamps }); 14 | 15 | const query = ` 16 | { 17 | __type(name: "Store") { 18 | name 19 | kind 20 | fields { 21 | name 22 | type { 23 | name 24 | kind 25 | } 26 | } 27 | } 28 | } 29 | `; 30 | describe('Store introspection tests', () => { 31 | it('should have the correct fields and types', async () => { 32 | const result = await graphqlSync({ schema, source: query }); 33 | const storeFieldTypes = get(result, 'data.__type.fields'); 34 | const hasCorrectFieldTypes = expectSameTypeNameOrKind(storeFieldTypes, fields); 35 | expect(hasCorrectFieldTypes).toBeTruthy(); 36 | }); 37 | it('should have a store connection', async () => { 38 | const result = await graphqlSync({ schema, source: query }); 39 | const addressFieldTypes = get(result, 'data.__type.fields'); 40 | const storeField = addressFieldTypes.find(field => field.name === 'products'); 41 | expect(storeField.type.kind).toBe('OBJECT'); 42 | expect(storeField.type.name).toBe('productsConnection'); 43 | }); 44 | }); 45 | 
-------------------------------------------------------------------------------- /server/gql/models/tests/supplierProducts/mutation.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { getResponse } from '@utils/testUtils'; 3 | 4 | describe('supplier_product graphQL-server-DB mutation tests', () => { 5 | const createSupplierProductMut = ` 6 | mutation { 7 | createSupplierProduct ( 8 | productId: 1 9 | supplierId: 1 10 | ) { 11 | id 12 | productId 13 | supplierId 14 | createdAt 15 | updatedAt 16 | deletedAt 17 | } 18 | } 19 | `; 20 | 21 | it('should have a mutation to create a new supplier Product', async () => { 22 | await getResponse(createSupplierProductMut).then(response => { 23 | const result = get(response, 'body.data.createSupplierProduct'); 24 | expect(result).toMatchObject({ 25 | id: '1', 26 | productId: 1, 27 | supplierId: 1 28 | }); 29 | }); 30 | }); 31 | 32 | const deleteSupplierProductMut = ` 33 | mutation { 34 | deleteSupplierProduct ( 35 | id: 1 36 | ) { 37 | id 38 | } 39 | } 40 | `; 41 | 42 | it('should have a mutation to delete a supplier product', async () => { 43 | await getResponse(deleteSupplierProductMut).then(response => { 44 | const result = get(response, 'body.data.deleteSupplierProduct'); 45 | 46 | expect(result).toEqual( 47 | expect.objectContaining({ 48 | id: 1 49 | }) 50 | ); 51 | }); 52 | }); 53 | }); 54 | -------------------------------------------------------------------------------- /server/gql/models/tests/supplierProducts/pagination.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { supplierProductsTable } from '@server/utils/testUtils/mockData'; 3 | import { getResponse } from '@utils/testUtils'; 4 | 5 | describe('Supplier Products graphQL-server-DB pagination tests', () => { 6 | const supplierProductsQuery = ` 7 | query { 8 | supplierProducts (first: 1, limit: 1, 
offset: 0){ 9 | edges { 10 | node { 11 | id 12 | productId 13 | supplierId 14 | suppliers (first: 10) { 15 | edges { 16 | node { 17 | id 18 | } 19 | } 20 | } 21 | products (first: 10) { 22 | edges { 23 | node { 24 | id 25 | } 26 | } 27 | } 28 | } 29 | } 30 | pageInfo { 31 | hasNextPage 32 | hasPreviousPage 33 | startCursor 34 | endCursor 35 | } 36 | total 37 | } 38 | } 39 | `; 40 | 41 | it('should have a query to get the storeProducts', async () => { 42 | await getResponse(supplierProductsQuery).then(response => { 43 | const result = get(response, 'body.data.supplierProducts.edges[0].node'); 44 | expect(result).toEqual( 45 | expect.objectContaining({ 46 | id: supplierProductsTable[0].id, 47 | productId: supplierProductsTable[0].productId, 48 | supplierId: supplierProductsTable[0].supplierId 49 | }) 50 | ); 51 | }); 52 | }); 53 | 54 | it('should have the correct pageInfo', async () => { 55 | await getResponse(supplierProductsQuery).then(response => { 56 | const result = get(response, 'body.data.supplierProducts.pageInfo'); 57 | expect(result).toEqual( 58 | expect.objectContaining({ 59 | hasNextPage: true, 60 | hasPreviousPage: false 61 | }) 62 | ); 63 | }); 64 | }); 65 | }); 66 | -------------------------------------------------------------------------------- /server/gql/models/tests/supplierProducts/query.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { productsTable, suppliersTable } from '@server/utils/testUtils/mockData'; 3 | import { getResponse, mockDBClient, resetAndMockDB } from '@utils/testUtils'; 4 | 5 | describe('supplier_products graphQL-server-DB query tests', () => { 6 | const id = 1; 7 | const supplierProductId = ` 8 | query { 9 | supplierProduct (id: ${id}) { 10 | id 11 | productId 12 | suppliers (first: 10) { 13 | edges { 14 | node { 15 | id 16 | } 17 | } 18 | } 19 | products (first: 10) { 20 | edges { 21 | node { 22 | id 23 | } 24 | } 25 | } 26 | } 27 | } 28 | 
`; 29 | 30 | it('should request for products and suppliers related to the supplierProducts', async () => { 31 | const dbClient = mockDBClient(); 32 | resetAndMockDB(null, {}, dbClient); 33 | 34 | jest.spyOn(dbClient.models.products, 'findAll').mockImplementation(() => [productsTable[0]]); 35 | jest.spyOn(dbClient.models.suppliers, 'findAll').mockImplementation(() => [suppliersTable[0]]); 36 | 37 | await getResponse(supplierProductId).then(response => { 38 | expect(get(response, 'body.data.supplierProduct')).toBeTruthy(); 39 | 40 | expect(dbClient.models.products.findAll.mock.calls.length).toBe(1); 41 | expect(dbClient.models.products.findAll.mock.calls[0][0].include[0].where).toEqual({ id }); 42 | expect(dbClient.models.products.findAll.mock.calls[0][0].include[0].model.name).toEqual('supplier_products'); 43 | 44 | expect(dbClient.models.suppliers.findAll.mock.calls.length).toBe(1); 45 | expect(dbClient.models.suppliers.findAll.mock.calls[0][0].include[0].where).toEqual({ supplierId: id }); 46 | expect(dbClient.models.suppliers.findAll.mock.calls[0][0].include[0].model.name).toEqual('supplier_products'); 47 | }); 48 | }); 49 | }); 50 | -------------------------------------------------------------------------------- /server/gql/models/tests/supplierProducts/supplierProducts.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { graphqlSync, GraphQLSchema } from 'graphql'; 3 | import { createFieldsWithType, expectSameTypeNameOrKind } from '@utils/testUtils'; 4 | import { QueryRoot } from '../../../queries'; 5 | import { MutationRoot } from '../../../mutations'; 6 | import { timestamps } from '@gql/fields/timestamps'; 7 | import { supplierProductFields } from '@gql/models/supplierProducts'; 8 | 9 | const schema = new GraphQLSchema({ query: QueryRoot, mutation: MutationRoot }); 10 | 11 | let fields = []; 12 | 13 | fields = createFieldsWithType({ ...supplierProductFields, ...timestamps }); 
import get from 'lodash/get';
import { getResponse } from '@utils/testUtils';

describe('supplier graphQL-server-DB mutation tests', () => {
  const createSupplierMut = `
    mutation {
      createSupplier (
        name: "new supplier name"
        addressId: 1
      ) {
        id
        name
        addressId
        createdAt
        updatedAt
        deletedAt
      }
    }
  `;

  it('should have a mutation to create a new supplier', async () => {
    // Pure async/await instead of the original `await promise.then(cb)`:
    // mixing the two obscures control flow and can turn a failed expect
    // inside the callback into an unhandled rejection.
    const response = await getResponse(createSupplierMut);
    const result = get(response, 'body.data.createSupplier');
    expect(result).toMatchObject({
      id: '1',
      name: 'new supplier name',
      addressId: 1
    });
  });

  const deleteSupplierMut = `
    mutation {
      deleteSupplier (
        id: 1
      ) {
        id
      }
    }
  `;

  it('should have a mutation to delete a supplier', async () => {
    const response = await getResponse(deleteSupplierMut);
    const result = get(response, 'body.data.deleteSupplier');
    expect(result).toEqual(
      expect.objectContaining({
        id: 1
      })
    );
  });
});
expect.objectContaining({ 46 | id: suppliersTable[0].id, 47 | name: suppliersTable[0].name, 48 | addressId: suppliersTable[0].addressId 49 | }) 50 | ); 51 | }); 52 | }); 53 | 54 | it('should have the correct pageInfo', async () => { 55 | await getResponse(suppliersQuery).then(response => { 56 | const result = get(response, 'body.data.suppliers.pageInfo'); 57 | expect(result).toEqual( 58 | expect.objectContaining({ 59 | hasNextPage: true, 60 | hasPreviousPage: false 61 | }) 62 | ); 63 | }); 64 | }); 65 | }); 66 | -------------------------------------------------------------------------------- /server/gql/models/tests/suppliers/query.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { addressesTable, productsTable } from '@server/utils/testUtils/mockData'; 3 | import { getResponse, mockDBClient, resetAndMockDB } from '@utils/testUtils'; 4 | 5 | describe('supplier graphQL-server-DB query tests', () => { 6 | const id = 1; 7 | const supplierName = ` 8 | query { 9 | supplier (id: ${id}) { 10 | id 11 | name 12 | addresses (first: 10) { 13 | edges { 14 | node { 15 | id 16 | } 17 | } 18 | } 19 | products (first: 10) { 20 | edges { 21 | node { 22 | id 23 | } 24 | } 25 | } 26 | } 27 | } 28 | `; 29 | it('should request for products and addresses related to the suppliers', async () => { 30 | const dbClient = mockDBClient(); 31 | resetAndMockDB(null, {}, dbClient); 32 | 33 | jest.spyOn(dbClient.models.products, 'findAll').mockImplementation(() => [productsTable[0]]); 34 | jest.spyOn(dbClient.models.addresses, 'findAll').mockImplementation(() => [addressesTable[0]]); 35 | 36 | await getResponse(supplierName).then(response => { 37 | expect(get(response, 'body.data.supplier')).toBeTruthy(); 38 | 39 | expect(dbClient.models.products.findAll.mock.calls.length).toBe(1); 40 | expect(dbClient.models.products.findAll.mock.calls[0][0].include[0].where).toEqual({ id }); 41 | 
expect(dbClient.models.products.findAll.mock.calls[0][0].include[0].model.name).toEqual('suppliers'); 42 | 43 | expect(dbClient.models.addresses.findAll.mock.calls.length).toBe(1); 44 | expect(dbClient.models.addresses.findAll.mock.calls[0][0].include[0].where).toEqual({ id }); 45 | expect(dbClient.models.addresses.findAll.mock.calls[0][0].include[0].model.name).toEqual('suppliers'); 46 | }); 47 | }); 48 | }); 49 | -------------------------------------------------------------------------------- /server/gql/models/tests/suppliers/suppliers.test.js: -------------------------------------------------------------------------------- 1 | import get from 'lodash/get'; 2 | import { graphqlSync, GraphQLSchema } from 'graphql'; 3 | import { createFieldsWithType, expectSameTypeNameOrKind } from '@utils/testUtils'; 4 | import { QueryRoot } from '../../../queries'; 5 | import { MutationRoot } from '../../../mutations'; 6 | import { timestamps } from '@gql/fields/timestamps'; 7 | import { supplierFields } from '@gql/models/suppliers'; 8 | 9 | const schema = new GraphQLSchema({ query: QueryRoot, mutation: MutationRoot }); 10 | 11 | let fields = []; 12 | 13 | fields = createFieldsWithType({ ...supplierFields, ...timestamps }); 14 | 15 | const query = ` 16 | { 17 | __type(name: "Supplier") { 18 | name 19 | kind 20 | fields { 21 | name 22 | type { 23 | name 24 | kind 25 | } 26 | } 27 | } 28 | } 29 | `; 30 | describe('Supplier introspection tests', () => { 31 | it('should have the correct fields and types', async () => { 32 | const result = await graphqlSync({ schema, source: query }); 33 | const supplierFieldTypes = get(result, 'data.__type.fields'); 34 | const hasCorrectFieldTypes = expectSameTypeNameOrKind(supplierFieldTypes, fields); 35 | expect(hasCorrectFieldTypes).toBeTruthy(); 36 | }); 37 | it('should have a store connection', async () => { 38 | const result = await graphqlSync({ schema, source: query }); 39 | const addressFieldTypes = get(result, 'data.__type.fields'); 40 | 
const storeField = addressFieldTypes.find(field => field.name === 'products'); 41 | expect(storeField.type.kind).toBe('OBJECT'); 42 | expect(storeField.type.name).toBe('productsConnection'); 43 | }); 44 | }); 45 | -------------------------------------------------------------------------------- /server/gql/models/users/index.js: -------------------------------------------------------------------------------- 1 | import { GraphQLID, GraphQLInt, GraphQLNonNull, GraphQLObjectType, GraphQLString } from 'graphql'; 2 | import { getNode } from '@gql/node'; 3 | import { createConnection } from 'graphql-sequelize'; 4 | import { timestamps } from '@gql/fields/timestamps'; 5 | import db from '@database/models'; 6 | import { totalConnectionFields, baseListResolver } from '@utils/index'; 7 | import { sequelizedWhere } from '@database/dbUtils'; 8 | import { getQueryFields, TYPE_ATTRIBUTES } from '@server/utils/gqlFieldUtils'; 9 | 10 | const { nodeInterface } = getNode(); 11 | 12 | export const userFields = { 13 | id: { type: new GraphQLNonNull(GraphQLID) }, 14 | firstName: { type: new GraphQLNonNull(GraphQLString) }, 15 | lastName: { type: new GraphQLNonNull(GraphQLString) } 16 | }; 17 | 18 | const GraphQLUser = new GraphQLObjectType({ 19 | name: 'user', 20 | interfaces: [nodeInterface], 21 | fields: () => ({ 22 | ...getQueryFields(userFields, TYPE_ATTRIBUTES.isNonNull), 23 | id: { type: new GraphQLNonNull(GraphQLID) }, 24 | email: { type: new GraphQLNonNull(GraphQLString) }, 25 | ...timestamps 26 | }) 27 | }); 28 | 29 | const UserConnection = createConnection({ 30 | name: 'users', 31 | target: db.users, 32 | nodeType: GraphQLUser, 33 | before: (findOptions, args, context) => { 34 | findOptions.include = findOptions.include || []; 35 | findOptions.where = sequelizedWhere(findOptions.where, args.where); 36 | return findOptions; 37 | }, 38 | ...totalConnectionFields 39 | }); 40 | 41 | export { GraphQLUser }; 42 | 43 | // queries on the users table. 
import { createNodeInterface } from 'graphql-sequelize';
import { getClient } from '@database/index';

// Lazily-created singleton: the graphql-sequelize node interface for Relay
// global object identification, shared by all GraphQL types in the schema.
let nodeInterface;

/**
 * Returns the memoized node interface, creating it from the shared database
 * client on first use.
 *
 * Fix: the original had a duplicated `return nodeInterface` inside the `if`
 * branch as well as after it — a single return after the lazy init suffices.
 *
 * @returns {object} the graphql-sequelize node interface bundle
 *                   (exposes `nodeInterface` and `nodeField`, per usage in
 *                   this file's siblings — confirm against graphql-sequelize docs).
 */
export function getNode() {
  if (!nodeInterface) {
    nodeInterface = createNodeInterface(getClient());
  }
  return nodeInterface;
}
}; 16 | Object.keys(DB_TABLES_QUERIES).forEach(table => { 17 | query[camelCase(table)] = { 18 | ...DB_TABLES_QUERIES[table].query, 19 | resolve: resolver(DB_TABLES_QUERIES[table].model), 20 | args: { 21 | id: { type: new GraphQLNonNull(GraphQLInt) }, 22 | ...DB_TABLES_QUERIES[table].args, 23 | ...defaultArgs(DB_TABLES_QUERIES[table].model) 24 | } 25 | }; 26 | }); 27 | return query; 28 | }; 29 | const DB_TABLES_LISTS = getGqlModels({ type: 'Lists', blacklist: ['aggregate', 'timestamps'] }); 30 | export const addLists = () => { 31 | const list = { 32 | aggregate: Aggregate 33 | }; 34 | Object.keys(DB_TABLES_LISTS).forEach(table => { 35 | list[pluralize(camelCase(table))] = { 36 | ...DB_TABLES_LISTS[table].list, 37 | args: { 38 | ...DB_TABLES_LISTS[table].list?.args, 39 | ...defaultListArgs(DB_TABLES_LISTS[table].model), 40 | ...customListArgs 41 | } 42 | }; 43 | }); 44 | return list; 45 | }; 46 | 47 | export const QueryRoot = new GraphQLObjectType({ 48 | name: 'Query', 49 | node: nodeField, 50 | fields: () => ({ 51 | ...addQueries(), 52 | ...addLists(), 53 | aggregate: Aggregate 54 | }) 55 | }); 56 | -------------------------------------------------------------------------------- /server/gql/subscriptions.js: -------------------------------------------------------------------------------- 1 | import { GraphQLObjectType } from 'graphql'; 2 | import { PurchasedProductSubscription } from './subscriptions/purchasedProductSubscription'; 3 | export const SubscriptionRoot = new GraphQLObjectType({ 4 | name: 'Subscription', 5 | fields: { 6 | newPurchasedProduct: { 7 | ...PurchasedProductSubscription 8 | } 9 | } 10 | }); 11 | -------------------------------------------------------------------------------- /server/gql/subscriptions/purchasedProductSubscription/index.js: -------------------------------------------------------------------------------- 1 | import { getAsyncInterator } from '@utils/iterator'; 2 | import { SUBSCRIPTION_TOPICS } from '@server/utils/constants'; 3 | 
import { getResponse } from '@utils/testUtils';
import { checkFilterCondition } from '../purchasedProductSubsUtil';
import * as module from 'graphql-subscriptions';

describe('Subscription tests', () => {
  it('should add a subscription', async () => {
    // Fix: the schema's newPurchasedProduct subscription declares a `storeId`
    // argument and the fields { productId, deliveryDate, price, storeId }.
    // The original query used a non-existent `supplierId` argument and
    // selection, which would fail GraphQL validation.
    const subscription = `
      subscription Subscription{
        newPurchasedProduct(storeId: 1671) {
          price
          storeId
          deliveryDate
          productId
        }
      }
    `;
    const spy = jest.spyOn(module, 'withFilter');
    await getResponse(subscription);
    expect(spy).toBeCalledWith(expect.any(Function), checkFilterCondition);
  });
});
/**
 * Express middleware that logs every request together with its full response
 * body. It wraps `res.write` and `res.end`, buffering each chunk so the
 * complete response can be serialized once the response finishes.
 *
 * Fix: the wrappers now forward the return values of the underlying
 * `res.write` (the backpressure boolean callers use to pause writing) and
 * `res.end` (the response object), both of which the originals dropped.
 *
 * @param {import('express').Request} req
 * @param {import('express').Response} res
 * @param {Function} next - next middleware in the chain
 */
export default function logReqRes(req, res, next) {
  const oldWrite = res.write;
  const oldEnd = res.end;

  // Accumulates every chunk written to the response.
  const chunks = [];

  res.write = (...restArgs) => {
    chunks.push(Buffer.from(restArgs[0]));
    // Propagate the "keep writing?" boolean so callers can honor backpressure.
    return oldWrite.apply(res, restArgs);
  };

  res.end = (...restArgs) => {
    // `end` may carry a final chunk.
    if (restArgs[0]) {
      chunks.push(Buffer.from(restArgs[0]));
    }
    const body = Buffer.concat(chunks).toString('utf8');

    logger().info(
      JSON.stringify({
        time: new Date().toUTCString(),
        // NOTE(review): `req.connection` is a deprecated alias of `req.socket`
        // in modern Node — consider migrating when touching this again.
        fromIP: req.headers['x-forwarded-for'] || req.connection.remoteAddress,
        method: req.method,
        originalUri: req.originalUrl,
        uri: req.url,
        requestData: req.body,
        responseData: body,
        referer: req.headers.referer || '',
        ua: req.headers['user-agent']
      })
    );

    // `res.end` returns the response object; pass it through.
    return oldEnd.apply(res, restArgs);
  };

  next();
}
${err.message || err}`; 19 | }); 20 | return breaker; 21 | }; 22 | -------------------------------------------------------------------------------- /server/services/redis.js: -------------------------------------------------------------------------------- 1 | import Redis from 'ioredis'; 2 | 3 | export const redis = new Redis(process.env.REDIS_PORT, process.env.REDIS_HOST); 4 | -------------------------------------------------------------------------------- /server/services/slack.js: -------------------------------------------------------------------------------- 1 | import slackNotify from 'slack-notify'; 2 | import { logger } from '@utils'; 3 | import rTracer from 'cls-rtracer'; 4 | 5 | let slack; 6 | export function getSlackInstance() { 7 | if (!slack) { 8 | slack = slackNotify(process.env.SLACK_WEBHOOK_URL); 9 | } 10 | return slack; 11 | } 12 | export async function sendMessage(text) { 13 | if (['production', 'development', 'test'].includes(process.env.ENVIRONMENT_NAME)) { 14 | return getSlackInstance() 15 | .send({ 16 | text: JSON.stringify({ requestId: rTracer.id(), error: text, env: process.env.ENVIRONMENT_NAME }), 17 | username: 'node-express-alerts' 18 | }) 19 | .catch(err => logger().error(`Slack error: ${err}. 
Please check webhook url`)); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /server/services/tests/circuitbreaker.test.js: -------------------------------------------------------------------------------- 1 | import { newCircuitBreaker } from '../circuitbreaker'; 2 | 3 | describe('newCircuitBreaker', () => { 4 | const fallbackMessage = 'Some fallback message'; 5 | it('should return the response from the API', async () => { 6 | const data = 'this is some API response'; 7 | const somefunc = async () => ({ 8 | data 9 | }); 10 | const testme = 'testme'; 11 | const breaker = newCircuitBreaker(somefunc, fallbackMessage); 12 | const res = await breaker.fire(testme); 13 | expect(res.data).toBe(data); 14 | }); 15 | it('should return the fallback message if the API throws an error', async () => { 16 | const customError = 'This is some error'; 17 | 18 | const somefunc = async () => { 19 | throw new Error(customError); 20 | }; 21 | const testme = 'testme'; 22 | const breaker = newCircuitBreaker(somefunc, fallbackMessage); 23 | const res = await breaker.fire(testme); 24 | expect(res).toBe(`${fallbackMessage}. ${customError}`); 25 | }); 26 | it('should return the fallback message if the API throws an error without a message.', async () => { 27 | const somefunc = async () => { 28 | throw new Error(); 29 | }; 30 | const testme = 'testme'; 31 | const breaker = newCircuitBreaker(somefunc, fallbackMessage); 32 | const res = await breaker.fire(testme); 33 | expect(res).toBe(`${fallbackMessage}. 
Error`); 34 | }); 35 | }); 36 | -------------------------------------------------------------------------------- /server/services/tests/slack.test.js: -------------------------------------------------------------------------------- 1 | import { sendMessage, getSlackInstance } from '../slack'; 2 | import * as utils from '@utils'; 3 | 4 | describe('Slack tests', () => { 5 | const message = 'Some generic message'; 6 | describe('slack send tests', () => { 7 | it('should send the message', async () => { 8 | jest.spyOn(getSlackInstance(), 'send').mockImplementation( 9 | msg => 10 | new Promise((resolve, reject) => { 11 | resolve(msg); 12 | }) 13 | ); 14 | 15 | const res = await sendMessage(message); 16 | // eslint-disable-next-line 17 | expect(res.text).toEqual(`{\"error\":\"${message}\",\"env\":\"test\"}`); 18 | }); 19 | }); 20 | describe('slack error tests', () => { 21 | it('should send the error', async () => { 22 | const error = new Error(); 23 | jest.spyOn(utils, 'logger').mockImplementation(() => { 24 | const obj = { 25 | error: err => err 26 | }; 27 | return obj; 28 | }); 29 | jest.spyOn(getSlackInstance(), 'send').mockImplementation( 30 | msg => 31 | new Promise((resolve, reject) => { 32 | reject(error); 33 | }) 34 | ); 35 | const res = await sendMessage(message); 36 | expect(res).toBe(`Slack error: ${error}. 
Please check webhook url`); 37 | }); 38 | }); 39 | }); 40 | -------------------------------------------------------------------------------- /server/tests/index.test.js: -------------------------------------------------------------------------------- 1 | import { restfulGetResponse, getResponse, resetAndMockDB } from '@utils/testUtils'; 2 | 3 | const query = ` 4 | query { 5 | __schema { 6 | queryType { 7 | fields { 8 | name 9 | } 10 | } 11 | } 12 | } 13 | `; 14 | describe('init', () => { 15 | const mocks = {}; 16 | it('should successfully configure environment variables and connect to the database', async () => { 17 | mocks.dotenv = { 18 | config: jest.fn 19 | }; 20 | jest.mock('dotenv', () => mocks.dotenv); 21 | jest.spyOn(mocks.dotenv, 'config'); 22 | await require('../index').init(); 23 | 24 | // check if the environments are being configured correctly 25 | expect(mocks.dotenv.config.mock.calls.length).toBe(2); 26 | }); 27 | 28 | it('should ensure the no of call to app.use', async () => { 29 | const { init, app } = await require('../index'); 30 | mocks.app = app; 31 | jest.spyOn(mocks.app, 'use'); 32 | await init(); 33 | 34 | // check if the server has been started 35 | expect(mocks.app.use.mock.calls.length).toBe(6); 36 | expect(mocks.app.use.mock.calls[0][0]).toEqual(expect.any(Function)); 37 | }); 38 | 39 | it('should invoke @database.connect ', async () => { 40 | mocks.db = { getClient: jest.fn(), connect: jest.fn() }; 41 | jest.spyOn(mocks.db, 'connect'); 42 | jest.doMock('@database', () => mocks.db); 43 | 44 | await require('../index'); 45 | 46 | // the database connection is being made 47 | expect(mocks.db.connect.mock.calls.length).toBe(1); 48 | }); 49 | }); 50 | describe('TestApp: Server', () => { 51 | it('should respond to /graphql', async () => { 52 | resetAndMockDB(); 53 | await getResponse(query).then(response => { 54 | expect(response.statusCode).toBe(200); 55 | expect(response.body.data.__schema.queryType.fields[0].name).toBeTruthy(); 56 | }); 57 
| }); 58 | }); 59 | 60 | describe('health check API', () => { 61 | it('respond with status 200 and correct message', async () => { 62 | const app = require('../index').app; 63 | const res = await restfulGetResponse('/', app); 64 | expect(res.statusCode).toBe(200); 65 | expect(res.body).toStrictEqual({ data: 'Service up and running!' }); 66 | }); 67 | }); 68 | 69 | describe('github API', () => { 70 | require('@services/circuitbreaker').__setupMocks(() => ({ data: { items: [] } })); 71 | it('respond with status 200 and correct message when CB is closed', async () => { 72 | process.env.ENVIRONMENT_NAME = 'local'; 73 | process.env.IS_TESTING = 'local'; 74 | const data = { data: 'this is fine' }; 75 | require('@services/circuitbreaker').__setupMocks(() => data); 76 | const app = require('../index').app; 77 | const res = await restfulGetResponse('/github', app); 78 | expect(res.statusCode).toBe(200); 79 | expect(res.body).toStrictEqual(data); 80 | process.env.ENVIRONMENT_NAME = 'test'; 81 | }); 82 | it('respond with status 424 and an error message when CB is open', async () => { 83 | process.env.ENVIRONMENT_NAME = 'local'; 84 | const error = 'Github API is down.'; 85 | require('@services/circuitbreaker').__setupMocks(() => error); 86 | const app = require('../index').app; 87 | const res = await restfulGetResponse('/github', app); 88 | expect(res.statusCode).toBe(424); 89 | expect(res.body).toStrictEqual({ error }); 90 | }); 91 | }); 92 | 93 | describe('fetchFromGithub', () => { 94 | it('should call the github api', async () => { 95 | const repo = 'repo'; 96 | const { fetchFromGithub } = require('../index'); 97 | const res = { data: { items: [] } }; 98 | const axiosSpy = jest.spyOn(require('axios'), 'get').mockImplementation(() => res); 99 | const data = await fetchFromGithub(repo); 100 | expect(data).toBe(res); 101 | expect(axiosSpy).toBeCalled(); 102 | expect(axiosSpy).toBeCalledWith(`https://api.github.com/search/repositories?q=${repo}&per_page=2`); 103 | }); 104 | }); 
import fs from 'fs';
import path from 'path';
import pluralize from 'pluralize';

/**
 * Auto-discovers the GraphQL model modules under server/gql/models and
 * collects, for each model folder, the export named
 * `<singularModelName><type>` (e.g. `userQueries`, `supplierLists`).
 *
 * @param {object} options
 * @param {string} options.type - export suffix to collect ('Queries', 'Lists', 'Mutations', ...).
 * @param {string[]} options.blacklist - folder names to skip (e.g. ['aggregate', 'timestamps']).
 * @returns {Object<string, object>} map of singular model name -> requested export.
 * @throws {Error} when the computed models folder path is not a string.
 */
export const getGqlModels = ({ type, blacklist }) => {
  const getModelFiles = modelsFolderPath => {
    if (typeof modelsFolderPath !== 'string') {
      throw new Error('modelPathString is invalid');
    }
    return fs
      .readdirSync(modelsFolderPath)
      .filter(folder => {
        // Keep only entries that contain an index.js. lstatSync throws for
        // plain files / missing index.js; those entries are skipped. Using
        // path.join instead of string concatenation avoids separator bugs.
        try {
          return fs.lstatSync(path.join(modelsFolderPath, folder, 'index.js')).isFile();
        } catch {
          return false;
        }
      })
      .filter(folder => !blacklist.includes(folder))
      .map(folder => `${folder}/index.js`);
  };
  const gqlModelsFolderPath = path.join(__dirname, '../gql/models/');

  const DB_TABLES = {};
  getModelFiles(gqlModelsFolderPath).forEach(f => {
    const gqlModel = require(`../gql/models/${f}`);
    // 'users/index.js' -> 'user'
    const name = pluralize.singular(f.split('/')[0].split('.')[0]);
    DB_TABLES[name] = gqlModel[`${name}${type}`];
  });
  return DB_TABLES;
};
NEW_PURCHASED_PRODUCT: 'newPurchasedProduct' 10 | }; 11 | 12 | // This date indicates when the mutations on createPurchasedProduct went live. We will not have to recalculate aggregate from database after this date. 13 | export const REDIS_IMPLEMENTATION_DATE = '2022-03-16'; 14 | export const MAX_PAGE_SIZE = 10; 15 | -------------------------------------------------------------------------------- /server/utils/gqlFieldUtils.js: -------------------------------------------------------------------------------- 1 | import cloneDeep from 'lodash/cloneDeep'; 2 | import get from 'lodash/get'; 3 | import { GraphQLNonNull } from 'graphql'; 4 | export const TYPE_ATTRIBUTES = { 5 | isNonNull: 'isNonNull', 6 | isCreateRequired: 'isCreateRequired', 7 | isUpdateRequired: 'isUpdateRequired' 8 | }; 9 | 10 | export const CREATE_AND_QUERY_REQUIRED_ARGS = { 11 | [TYPE_ATTRIBUTES.isNonNull]: true, 12 | [TYPE_ATTRIBUTES.isCreateRequired]: true 13 | }; 14 | export const getQueryFields = (fields, keyToCheck) => { 15 | const fieldsClone = cloneDeep(fields); 16 | Object.keys(fieldsClone).forEach(key => { 17 | if (get(fieldsClone, `${key}.${keyToCheck}`)) { 18 | fieldsClone[key].type = new GraphQLNonNull(fieldsClone[key].type); 19 | } 20 | }); 21 | return fieldsClone; 22 | }; 23 | -------------------------------------------------------------------------------- /server/utils/gqlSchemaParsers.js: -------------------------------------------------------------------------------- 1 | import { Kind } from 'graphql/language'; 2 | 3 | export function parseObject(typeName, ast, variables) { 4 | const value = {}; 5 | ast.fields.forEach(field => { 6 | value[field.name.value] = parseLiteral(field.value, field.value, variables); 7 | }); 8 | 9 | return value; 10 | } 11 | 12 | export function parseLiteral(typeName, ast, variables) { 13 | switch (ast.kind) { 14 | case Kind.STRING: 15 | case Kind.BOOLEAN: 16 | return ast.value; 17 | case Kind.INT: 18 | case Kind.FLOAT: 19 | return parseFloat(ast.value); 20 | 
case Kind.OBJECT: 21 | return parseObject(typeName, ast, variables); 22 | case Kind.LIST: 23 | return ast.values.map(n => parseLiteral(typeName, n, variables)); 24 | case Kind.NULL: 25 | return null; 26 | case Kind.VARIABLE: 27 | return variables?.[ast.name.value] ?? undefined; 28 | default: 29 | throw new TypeError(`${typeName} cannot represent value: ${JSON.stringify(ast)}`); 30 | } 31 | } 32 | 33 | export const convertToMap = (argArr, variables = {}) => { 34 | const args = {}; 35 | argArr.forEach(arg => { 36 | if (arg.value.kind === 'Variable') { 37 | args[arg.name.value] = variables[arg.value.name.value]; 38 | } else if (arg.value.kind === 'IntValue') { 39 | args[arg.name.value] = parseInt(arg.value.value, 10); 40 | } else if (arg.value.kind === 'ObjectValue') { 41 | args[arg.name.value] = parseObject(Kind.OBJECT, arg.value, variables); 42 | } else { 43 | args[arg.name.value] = arg.value.value; 44 | } 45 | }); 46 | return args; 47 | }; 48 | -------------------------------------------------------------------------------- /server/utils/index.js: -------------------------------------------------------------------------------- 1 | import isEmpty from 'lodash/isEmpty'; 2 | import { GraphQLInt, GraphQLNonNull } from 'graphql'; 3 | import { createLogger, format, transports } from 'winston'; 4 | import rTracer from 'cls-rtracer'; 5 | import { MAX_PAGE_SIZE } from './constants'; 6 | 7 | const { combine, timestamp, printf } = format; 8 | export const isTestEnv = () => process.env.ENVIRONMENT_NAME === 'test' || process.env.NODE_ENV === 'test'; 9 | export const isLocalEnv = () => process.env.ENVIRONMENT_NAME === 'local'; 10 | 11 | export const addWhereClause = (where, clause) => { 12 | if (isEmpty(where)) { 13 | where += ' WHERE ('; 14 | } else { 15 | where += ' AND ('; 16 | } 17 | return ` ${where} ${clause} ) `; 18 | }; 19 | 20 | export const totalConnectionFields = { 21 | connectionFields: { 22 | total: { 23 | resolve: meta => meta.fullCount, 24 | type: new 
GraphQLNonNull(GraphQLInt) 25 | } 26 | } 27 | }; 28 | 29 | export const stringifyWithCheck = message => { 30 | if (!message) { 31 | return ''; 32 | } 33 | try { 34 | return JSON.stringify(message); 35 | } catch (err) { 36 | if (message.data) { 37 | return stringifyWithCheck(message.data); 38 | } else { 39 | console.log(message); 40 | return `unable to unfurl message: ${message}`; 41 | } 42 | } 43 | }; 44 | export const logger = () => { 45 | const rTracerFormat = printf(info => { 46 | const rid = rTracer.id(); 47 | // @ts-ignore 48 | const infoSplat = info[Symbol.for('splat')] || []; 49 | 50 | let message = `${info.timestamp}: ${stringifyWithCheck(info.message)} ${stringifyWithCheck(...infoSplat)}`; 51 | if (rid) { 52 | message = `[request-id:${rid}]: ${message}`; 53 | } 54 | return message; 55 | }); 56 | return createLogger({ 57 | format: combine(timestamp(), rTracerFormat), 58 | transports: [new transports.Console()] 59 | }); 60 | }; 61 | 62 | export const transformSQLError = e => (e.errors || []).map(err => err.message).join('. 
') || e.original; 63 | 64 | export const getLogger = () => { 65 | if (process.env.NODE_ENV === 'test') { 66 | return false; 67 | } 68 | return args => logger().info(args); 69 | }; 70 | 71 | export const listResolver = (model, source, args, context, info) => { 72 | if (!args.first && !args.last) { 73 | throw new Error(`${model.model.name}:: Either first or last is required`); 74 | } 75 | if (args.first > MAX_PAGE_SIZE || args.last > MAX_PAGE_SIZE) { 76 | throw new Error(`${model.model.name}:: first and last should be less than ${MAX_PAGE_SIZE}`); 77 | } 78 | return model.list.resolve(source, args, context, info); 79 | }; 80 | 81 | export const baseListResolver = (connection, ...args) => { 82 | const vars = args[1]; 83 | if (vars.limit > MAX_PAGE_SIZE) { 84 | throw new Error(`Limit cannot be greater than ${MAX_PAGE_SIZE}`); 85 | } 86 | return connection.resolve(...args); 87 | }; 88 | -------------------------------------------------------------------------------- /server/utils/iterator.js: -------------------------------------------------------------------------------- 1 | import { pubsub } from '@utils/pubsub'; 2 | export function getAsyncInterator(topicName) { 3 | return () => pubsub.asyncIterator(topicName); 4 | } 5 | -------------------------------------------------------------------------------- /server/utils/migrateUtils.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const shell = require('shelljs'); 3 | 4 | function getVersion(currentFileName) { 5 | let version; 6 | shell.ls(`./migrations`).forEach((item, index) => { 7 | if (item === currentFileName) { 8 | version = index + 1; 9 | } 10 | }); 11 | return version; 12 | } 13 | 14 | module.exports = { 15 | migrate: async function(currentFileName, queryInterface) { 16 | const version = getVersion(currentFileName.split('/')[currentFileName.split('/').length - 1]); 17 | const directories = shell.ls(`./resources/v${version}`); 18 | for (let index 
= 0; index < directories.length; index++) { 19 | const fileName = directories[index]; 20 | await queryInterface.sequelize.query(fs.readFileSync(`./resources/v${version}/${fileName}`, 'utf-8')).catch(e => { 21 | const error = e.original.sqlMessage; 22 | if (error && error.startsWith('Table') && error.endsWith('already exists')) { 23 | // If the database is already built add this migration to sequelizeMeta table. 24 | return; 25 | } 26 | throw e; 27 | }); 28 | } 29 | }, 30 | getVersion 31 | }; 32 | -------------------------------------------------------------------------------- /server/utils/passwordUtils.js: -------------------------------------------------------------------------------- 1 | import crypto from 'crypto'; 2 | 3 | export const createPassword = password => { 4 | const salt = crypto.randomBytes(16).toString('hex'); // Generate a random salt 5 | const hashedPassword = crypto.scryptSync(password, salt, 64).toString('hex'); // Generate a hash 6 | return `${salt}:${hashedPassword}`; 7 | }; 8 | 9 | export const checkPassword = (passwordToVerify, storedHash) => { 10 | const [salt, key] = storedHash.split(':'); 11 | const derivedKey = crypto.scryptSync(passwordToVerify, salt, 64).toString('hex'); 12 | return key === derivedKey; 13 | }; 14 | -------------------------------------------------------------------------------- /server/utils/pubsub.js: -------------------------------------------------------------------------------- 1 | import { RedisPubSub } from 'graphql-redis-subscriptions'; 2 | import Redis from 'ioredis'; 3 | 4 | export const options = { 5 | host: process.env.REDIS_HOST, 6 | port: process.env.REDIS_PORT, 7 | connectTimeout: 10000, 8 | retryStrategy: times => Math.min(times * 50, 2000) 9 | }; 10 | 11 | export const pubsub = new RedisPubSub({ 12 | publisher: new Redis(options), 13 | subscriber: new Redis(options) 14 | }); 15 | -------------------------------------------------------------------------------- /server/utils/queue.js: 
-------------------------------------------------------------------------------- 1 | import { aggregateCheck } from '@server/cronJobs/aggregateJob'; 2 | import Bull from 'bull'; 3 | import moment from 'moment'; 4 | const queues = {}; 5 | 6 | export const QUEUE_NAMES = { 7 | SCHEDULED_JOB: 'scheduledJob', 8 | AGGREGATE_CHECK: 'aggregateCheck' 9 | }; 10 | 11 | export const QUEUE_PROCESSORS = { 12 | [QUEUE_NAMES.SCHEDULED_JOB]: (job, done) => { 13 | console.log(`${moment()}::Job with id: ${job.id} is being executed.\n`, { 14 | message: job.data.message 15 | }); 16 | done(); 17 | }, 18 | [QUEUE_NAMES.AGGREGATE_CHECK]: (job, done) => { 19 | console.log('Aggregate job is getting executed'); 20 | aggregateCheck(); 21 | done(); 22 | } 23 | }; 24 | 25 | const CRON_EXPRESSIONS = { 26 | MIDNIGHT: '0 0 * * *' 27 | }; 28 | 29 | export const initQueues = () => { 30 | console.log('init queues'); 31 | Object.keys(QUEUE_PROCESSORS).forEach(queueName => { 32 | queues[queueName] = getQueue(queueName); 33 | queues[queueName].process(QUEUE_PROCESSORS[queueName]); 34 | }); 35 | queues[QUEUE_NAMES.AGGREGATE_CHECK].add({}, { repeat: { cron: CRON_EXPRESSIONS.MIDNIGHT } }); 36 | }; 37 | export const getQueue = queueName => { 38 | if (!queues[queueName]) { 39 | queues[queueName] = new Bull(queueName, `redis://${process.env.REDIS_HOST}:${process.env.REDIS_PORT}`); 40 | console.log('created queue: ', queueName, `redis://${process.env.REDIS_HOST}:${process.env.REDIS_PORT}`); 41 | } 42 | return queues[queueName]; 43 | }; 44 | -------------------------------------------------------------------------------- /server/utils/testUtils/dbConfig.js: -------------------------------------------------------------------------------- 1 | export const DB_ENV = { 2 | DB_URI: 'postgres://reporting_dashboard_role:reportingdashboard123@localhost:5432/reporting_dashboard_dev', 3 | POSTGRES_HOST: 'host', 4 | POSTGRES_USER: 'user', 5 | POSTGRES_PASSWORD: 'password', 6 | POSTGRES_DB: 'table' 7 | }; 8 | 
-------------------------------------------------------------------------------- /server/utils/testUtils/mockData.js: -------------------------------------------------------------------------------- 1 | import range from 'lodash/range'; 2 | import faker from 'faker'; 3 | import { createPassword } from '../passwordUtils'; 4 | const createdBefore = parseInt(Math.random() * 1000); 5 | 6 | export const addressesTable = range(1, 10).map((_, index) => ({ 7 | id: (index + 1).toString(), 8 | address1: faker.address.streetName(), 9 | address2: faker.address.streetAddress(), 10 | city: faker.address.city(), 11 | country: faker.address.country(), 12 | latitude: faker.address.latitude(), 13 | longitude: faker.address.longitude() 14 | })); 15 | 16 | export const usersTable = range(1, 10).map((_, index) => ({ 17 | id: (index + 1).toString(), 18 | firstName: faker.name.firstName(), 19 | lastName: faker.name.lastName(), 20 | email: faker.internet.email(), 21 | password: createPassword(faker.internet.password()), 22 | created_at: faker.date.recent(createdBefore) 23 | })); 24 | 25 | export const productsTable = range(1, 10).map((_, index) => ({ 26 | id: (index + 1).toString(), 27 | name: faker.commerce.productName(), 28 | category: 'Sports', 29 | amount: faker.commerce.price() 30 | })); 31 | 32 | export const purchasedProductsTable = range(1, 10).map((_, index) => ({ 33 | id: (index + 1).toString(), 34 | productId: (index + 1).toString(), 35 | price: 500, 36 | discount: faker.datatype.number(20), 37 | deliveryDate: faker.date.recent(createdBefore), 38 | storeId: (index + 2).toString() 39 | })); 40 | 41 | export const storesTable = range(1, 10).map((_, index) => ({ 42 | id: (index + 1).toString(), 43 | name: faker.company.companyName(), 44 | addressId: index + 1 45 | })); 46 | 47 | export const storeProductsTable = range(1, 10).map((_, index) => ({ 48 | id: (index + 1).toString(), 49 | productId: index + 1, 50 | storeId: index + 1 51 | })); 52 | 53 | export const suppliersTable = 
range(1, 10).map((_, index) => ({ 54 | id: (index + 1).toString(), 55 | name: faker.company.companyName(), 56 | addressId: index + 1 57 | })); 58 | 59 | export const supplierProductsTable = range(1, 10).map((_, index) => ({ 60 | id: (index + 1).toString(), 61 | productId: index + 1, 62 | supplierId: index + 1 63 | })); 64 | -------------------------------------------------------------------------------- /server/utils/testUtils/testApp.js: -------------------------------------------------------------------------------- 1 | import express from 'express'; 2 | import { expressMiddleware } from '@apollo/server/express4'; 3 | import { GraphQLSchema } from 'graphql'; 4 | import dotenv from 'dotenv'; 5 | import { QueryRoot } from '@gql/queries'; 6 | import { MutationRoot } from '@gql/mutations'; 7 | import { client } from '@database'; 8 | import { SubscriptionRoot } from '@gql/subscriptions'; 9 | import { ApolloServer } from '@apollo/server'; 10 | import { logger } from '../index'; 11 | 12 | const connect = async () => { 13 | await client.authenticate(); 14 | }; 15 | 16 | connect(); 17 | 18 | // configure environment variables 19 | dotenv.config({ path: `.env.${process.env.ENVIRONMENT_NAME}` }); 20 | 21 | // create the graphQL schema 22 | const schema = new GraphQLSchema({ query: QueryRoot, mutation: MutationRoot, subscription: SubscriptionRoot }); 23 | 24 | const getTestApp = async () => { 25 | const testApp = express(); 26 | const server = new ApolloServer({ 27 | schema, 28 | formatError: e => { 29 | logger().info({ e }); 30 | return e.message; 31 | } 32 | }); 33 | await server.start(); 34 | 35 | testApp.use('/graphql', express.json(), expressMiddleware(server)); 36 | testApp.use('/', (_, response) => { 37 | response 38 | .status(200) 39 | .json({ message: 'OK' }) 40 | .send(); 41 | }); 42 | return testApp; 43 | }; 44 | 45 | export { getTestApp }; 46 | -------------------------------------------------------------------------------- 
/server/utils/tests/getAsyncIterator.test.js: -------------------------------------------------------------------------------- 1 | import { SUBSCRIPTION_TOPICS } from '../constants'; 2 | import { getAsyncInterator } from '../iterator'; 3 | import { pubsub } from '../pubsub'; 4 | 5 | describe('getAsyncInterator tests', () => { 6 | it('should call the async iterator with the given topic', () => { 7 | const spy = jest.spyOn(pubsub, 'asyncIterator'); 8 | getAsyncInterator(SUBSCRIPTION_TOPICS.NEW_PURCHASED_PRODUCT)(); 9 | expect(spy).toBeCalled(); 10 | }); 11 | }); 12 | -------------------------------------------------------------------------------- /server/utils/tests/gqlFieldUtils.test.js: -------------------------------------------------------------------------------- 1 | const { GraphQLID, GraphQLNonNull } = require('graphql'); 2 | const { getQueryFields, CREATE_AND_QUERY_REQUIRED_ARGS, TYPE_ATTRIBUTES } = require('../gqlFieldUtils'); 3 | 4 | describe('getQueryFields', () => { 5 | it('should return type wrapped in GraphQLNonNull', () => { 6 | const baseFields = { 7 | id: { 8 | type: GraphQLID, 9 | ...CREATE_AND_QUERY_REQUIRED_ARGS 10 | }, 11 | prop: { 12 | type: GraphQLID 13 | } 14 | }; 15 | const fields = getQueryFields(baseFields, TYPE_ATTRIBUTES.isCreateRequired); 16 | expect(fields.id.type).toEqual(new GraphQLNonNull(baseFields.id.type)); 17 | expect(fields.prop.type).toEqual(baseFields.prop.type); 18 | }); 19 | }); 20 | -------------------------------------------------------------------------------- /server/utils/tests/index.test.js: -------------------------------------------------------------------------------- 1 | import { stringifyWithCheck, addWhereClause, isTestEnv, totalConnectionFields, transformSQLError } from '@utils/index'; 2 | 3 | describe('isTestEnv', () => { 4 | it("should return true if the ENVIRONMENT_NAME is 'test'", () => { 5 | expect(isTestEnv()).toBe(true); 6 | }); 7 | }); 8 | 9 | describe('addWhereClause', () => { 10 | it("should construct 
the whereClause correctly'", () => { 11 | const where = addWhereClause('', 'A = B'); 12 | expect(where).toBe(' WHERE ( A = B ) '); 13 | expect(addWhereClause(where, 'A = B')).toBe(' WHERE ( A = B ) AND ( A = B ) '); 14 | }); 15 | }); 16 | 17 | describe('totalConnectionFields', () => { 18 | it('should get the fullcount', () => { 19 | expect(totalConnectionFields.connectionFields.total.resolve({ fullCount: 10 })).toBe(10); 20 | }); 21 | }); 22 | 23 | describe('stringifyWithCheck', () => { 24 | it('should return the strigified message', () => { 25 | const obj = { a: 'b' }; 26 | const res = stringifyWithCheck(obj); 27 | expect(res).toBe(JSON.stringify(obj)); 28 | }); 29 | it('should not throw an error if its not able to stringify the object', () => { 30 | const obj = { a: 'b' }; 31 | obj.obj = obj; 32 | const res = stringifyWithCheck(obj); 33 | expect(res).toBe('unable to unfurl message: [object Object]'); 34 | }); 35 | 36 | it('should stringify the data key if present in the message and unable to stringify the original value', () => { 37 | const obj = { a: 'b' }; 38 | obj.obj = obj; 39 | obj.data = { body: 'This is the real answer' }; 40 | const res = stringifyWithCheck(obj); 41 | expect(res).toBe(JSON.stringify(obj.data)); 42 | }); 43 | it('should join the database errors and return them', () => { 44 | const error = { 45 | errors: [ 46 | { 47 | message: 'This is sample error' 48 | } 49 | ] 50 | }; 51 | const res = transformSQLError(error); 52 | expect(res).toBe('This is sample error'); 53 | }); 54 | it('should return the original error if there is single error', () => { 55 | const error = { 56 | original: 'This is an error' 57 | }; 58 | const res = transformSQLError(error); 59 | expect(res).toBe('This is an error'); 60 | }); 61 | }); 62 | -------------------------------------------------------------------------------- /server/utils/tests/migrateUtils.test.js: -------------------------------------------------------------------------------- 1 | describe('migrationUtils 
tests', () => { 2 | jest.mock('shelljs', () => ({ 3 | ls: () => ['test_file_name_1', 'test_file_name_2'] 4 | })); 5 | jest.mock('fs', () => ({ 6 | readFileSync: () => {} 7 | })); 8 | it('should export a getVersion function that returns the version of the current migration', () => { 9 | const { getVersion } = require('./../migrateUtils'); 10 | const version = getVersion('test_file_name_2'); 11 | expect(version).toBe(2); 12 | }); 13 | it('should extract the version from the currentFile and pass the resources to query', () => { 14 | const fs = require('fs'); 15 | const fsSpy = jest.spyOn(fs, 'readFileSync'); 16 | const { migrate } = require('./../migrateUtils'); 17 | const query = jest.fn(async () => {}); 18 | const queryInterface = { 19 | sequelize: { 20 | query 21 | } 22 | }; 23 | migrate('test_file_name_2', queryInterface); 24 | const filesInResourcesVersion = ['test_file_name_1', 'test_file_name_2']; 25 | expect(fsSpy).toHaveBeenCalledWith(`./resources/v${2}/${filesInResourcesVersion[0]}`, 'utf-8'); 26 | }); 27 | it('should have a migrate function that calls query for all migrations', () => { 28 | const { migrate } = require('./../migrateUtils'); 29 | const query = jest.fn(async () => {}); 30 | const queryInterface = { 31 | sequelize: { 32 | query 33 | } 34 | }; 35 | const result = migrate('test_file_name_2', queryInterface); 36 | expect(result); 37 | expect(query).toHaveBeenCalled(); 38 | }); 39 | it('should have a migrate function that catches a sql error if the migration fails', async () => { 40 | const sqlError = new Error(); 41 | sqlError.original = { 42 | sqlMessage: { 43 | startsWith: () => true, 44 | endsWith: () => true 45 | } 46 | }; 47 | const query = jest.fn(() => new Promise((resolve, reject) => reject(sqlError))); 48 | const { migrate } = require('./../migrateUtils'); 49 | const queryInterface = { 50 | sequelize: { 51 | query 52 | } 53 | }; 54 | expect(await migrate('test_file_name_2', queryInterface)).toBeFalsy(); 55 | }); 56 | }); 57 | 
-------------------------------------------------------------------------------- /server/utils/tests/passwordUtils.test.js: -------------------------------------------------------------------------------- 1 | import { createPassword, checkPassword } from '../passwordUtils'; 2 | import crypto from 'crypto'; 3 | 4 | describe('createPassword tests', () => { 5 | it('should ensure that it create correct password', () => { 6 | const password = '1234'; 7 | const salt = crypto.randomBytes(16).toString('hex'); 8 | const hashedPassword = crypto.scryptSync(password, salt, 64).toString('hex'); 9 | expect(createPassword(password).length).toEqual(`${salt}:${hashedPassword}`.length); 10 | }); 11 | }); 12 | 13 | describe('check password tests', () => { 14 | const password = '1234'; 15 | const salt = crypto.randomBytes(16).toString('hex'); 16 | const hashedPassword = `${salt}:${crypto.scryptSync(password, salt, 64).toString('hex')}`; 17 | 18 | it('should ensure it returns true when password is correct', async () => { 19 | expect(await checkPassword(password, hashedPassword)).toBeTruthy(); 20 | }); 21 | it('should ensure it returns false when password is incorrect', async () => { 22 | expect(await checkPassword('123', hashedPassword)).toBeFalsy(); 23 | expect(await checkPassword('', hashedPassword)).toBeFalsy(); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /server/utils/tests/pubsub.test.js: -------------------------------------------------------------------------------- 1 | describe('Pubsub tests', () => { 2 | it('should create a new Pubsub', () => { 3 | const graphqlSubscription = require('graphql-redis-subscriptions'); 4 | const spy = jest.spyOn(graphqlSubscription, 'RedisPubSub'); 5 | require('@utils/pubsub'); 6 | expect(spy).toBeCalledTimes(1); 7 | }); 8 | it('should set the retry stratergy ', () => { 9 | const module = require('@utils/pubsub'); 10 | const spy = jest.spyOn(module.options, 'retryStrategy'); 11 | const 
res = spy(3); 12 | expect(res).toEqual(150); 13 | }); 14 | }); 15 | -------------------------------------------------------------------------------- /server/utils/tests/queue.test.js: -------------------------------------------------------------------------------- 1 | import { getQueue, initQueues, QUEUE_PROCESSORS } from '@utils/queue'; 2 | import moment from 'moment'; 3 | import * as queue from '@utils/queue'; 4 | 5 | describe('Queue tests', () => { 6 | it('getQueues should create a queue if not present', async () => { 7 | const sampleQueue = 'sampleQueue'; 8 | const res = await getQueue(sampleQueue); 9 | expect(res.data).toEqual(sampleQueue); 10 | }); 11 | it('should return the queue if already present ', async () => { 12 | jest.spyOn(queue, 'getQueue').mockImplementation(name => { 13 | const queueName = name; 14 | const queues = {}; 15 | queues[queueName] = { 16 | data: 'This is sample queue', 17 | process: jest.fn() 18 | }; 19 | return queues[name]; 20 | }); 21 | const res = await getQueue('sampleQueue'); 22 | expect(res.data).toBe('This is sample queue'); 23 | jest.spyOn(queue, 'getQueue').mockClear(); 24 | }); 25 | it('should initialize the queues', async () => { 26 | jest.spyOn(QUEUE_PROCESSORS, 'scheduledJob').mockImplementation(() => ({ 27 | sampleQueue: job => ({ 28 | message: job.message 29 | }) 30 | })); 31 | jest.spyOn(console, 'log'); 32 | await initQueues(); 33 | expect(console.log.mock.calls[0][0]).toBe('init queues'); 34 | jest.spyOn(console, 'log').mockClear(); 35 | }); 36 | 37 | describe('Queue processes tests', () => { 38 | beforeAll(() => { 39 | jest.restoreAllMocks(); 40 | }); 41 | it('should console the job id if a job is getting executed', () => { 42 | jest.resetModules(); 43 | jest.spyOn(console, 'log'); 44 | initQueues(); 45 | expect(console.log.mock.calls.length).toBe(5); 46 | expect(console.log.mock.calls[1][0]).toBe(`${moment()}::Job with id: 1 is being executed.\n`); 47 | expect(console.log.mock.calls[2][0]).toBe('done'); 48 | }); 49 
| }); 50 | }); 51 | -------------------------------------------------------------------------------- /server/utils/tests/token.test.js: -------------------------------------------------------------------------------- 1 | import { Token } from '../token'; 2 | 3 | describe('token tests', () => { 4 | const OLD_ENV = process.env; 5 | const keys = { 6 | ACCESS_TOKEN_SECRET: '4cd7234152590dcfe77e1b6fc52e84f4d30c06fddadd0dd2fb42cb' 7 | }; 8 | 9 | beforeEach(() => { 10 | process.env = { ...OLD_ENV, ...keys }; 11 | }); 12 | afterAll(() => { 13 | process.env = OLD_ENV; 14 | }); 15 | it('should return error if there is not any user', () => { 16 | const user = { id: 1 }; 17 | const signedToken = new Token({ user }).get(); 18 | expect(signedToken).not.toBeUndefined(); 19 | }); 20 | }); 21 | -------------------------------------------------------------------------------- /server/utils/tests/transformerUtils.test.js: -------------------------------------------------------------------------------- 1 | import { transformDbArrayResponseToRawResponse, convertDbResponseToRawResponse } from '../transformerUtils'; 2 | 3 | describe('transformerUtils tests', () => { 4 | const response = [ 5 | { 6 | products: { 7 | dataValues: { 8 | category: 'Shoes' 9 | } 10 | }, 11 | get: () => 'Shoes' 12 | }, 13 | { 14 | products: { 15 | dataValues: { 16 | category: 'Health' 17 | } 18 | }, 19 | get: () => 'Health' 20 | } 21 | ]; 22 | it('should transform the array response to raw response ', () => { 23 | const { transformDbArrayResponseToRawResponse } = require('../transformerUtils'); 24 | const res = transformDbArrayResponseToRawResponse(response); 25 | expect(res).toEqual([response[0].products.dataValues.category, response[1].products.dataValues.category]); 26 | }); 27 | it('should throw error if the response passed in argument is not an object', () => { 28 | expect(transformDbArrayResponseToRawResponse).toThrowError('The required type should be an object(array)'); 29 | }); 30 | 31 | 
describe('convertDbResponseToRawResponse tests', () => { 32 | it('should call the get method on the response', () => { 33 | const res = convertDbResponseToRawResponse(response[0]); 34 | expect(res).toEqual(response[0].products.dataValues.category); 35 | }); 36 | }); 37 | }); 38 | -------------------------------------------------------------------------------- /server/utils/token.js: -------------------------------------------------------------------------------- 1 | import jwt from 'jsonwebtoken'; 2 | import { logger } from '@utils'; 3 | 4 | export class Token { 5 | static secret = process.env.ACCESS_TOKEN_SECRET; 6 | expiresIn = '1d'; 7 | 8 | constructor({ user, overrideExpiration }) { 9 | if (overrideExpiration) { 10 | this.expiresIn = overrideExpiration; 11 | } 12 | if (!user) { 13 | logger().error('Token::constructor::user_not_found'); 14 | return; 15 | } 16 | this.user = user; 17 | } 18 | 19 | get() { 20 | const token = jwt.sign( 21 | { 22 | user: this.user 23 | }, 24 | process.env.ACCESS_TOKEN_SECRET, 25 | { 26 | expiresIn: this.expiresIn 27 | } 28 | ); 29 | logger().debug(`Token::get::${token}`); 30 | return token; 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /server/utils/transformerUtils.js: -------------------------------------------------------------------------------- 1 | import { isArray, snakeCase } from 'lodash'; 2 | import mapKeysDeep from 'map-keys-deep'; 3 | 4 | export const convertDbResponseToRawResponse = dbResponse => 5 | dbResponse.get({ 6 | plain: true, 7 | raw: true 8 | }); 9 | 10 | export const transformDbArrayResponseToRawResponse = arr => { 11 | if (!isArray(arr)) { 12 | throw new Error('The required type should be an object(array)'); 13 | } else { 14 | return arr.map(resource => mapKeysDeep(convertDbResponseToRawResponse(resource), keys => snakeCase(keys))); 15 | } 16 | }; 17 | -------------------------------------------------------------------------------- /sonar-project.properties: 
-------------------------------------------------------------------------------- 1 | sonar.organization=wednesday-solutions 2 | sonar.projectKey=wednesday-solutions_node-express-graphql-template_AYbhJQVowZhMDp9FLuck 3 | 4 | sonar.language=js 5 | sonar.sources=server 6 | sonar.tests=server 7 | sonar.exclusions=dist,resources,migrations,seeders,backup,assets, server/utils/testUtils/**,server/database/models/** 8 | sonar.test.exclusions= migrations/*.js,*./server/database/models/**,*./server/utils/testUtils/**,*./server/**/*.js 9 | sonar.test.inclusions=**/*.test.js 10 | 11 | sonar.javascript.lcov.reportPaths=./coverage/lcov.info 12 | sonar.testExecutionReportPaths=./reports/test-report.xml 13 | sonar.sourceEncoding=UTF-8 14 | -------------------------------------------------------------------------------- /webpack.dev.config.js: -------------------------------------------------------------------------------- 1 | /** 2 | * DEVELOPMENT WEBPACK CONFIGURATION 3 | */ 4 | 5 | const path = require('path'); 6 | const webpack = require('webpack'); 7 | 8 | module.exports = require('./webpack.server.config')({ 9 | mode: 'development', 10 | // Add hot reloading in development 11 | entry: ['webpack-hot-middleware/client?reload=true', path.join(process.cwd(), 'server/index.js')], 12 | // Don't use hashes in dev mode for better performance 13 | 14 | // Add development plugins 15 | plugins: [ 16 | new webpack.HotModuleReplacementPlugin() // Tell webpack we want hot reloading 17 | ] 18 | }); 19 | -------------------------------------------------------------------------------- /webpack.prod.config.js: -------------------------------------------------------------------------------- 1 | // Important modules this config uses 2 | const path = require('path'); 3 | 4 | module.exports = require('./webpack.server.config')({ 5 | mode: 'production', 6 | entry: [path.join(process.cwd(), 'server/index.js')], 7 | 8 | plugins: [] 9 | }); 10 | 
--------------------------------------------------------------------------------