├── .nvmrc ├── .dockerignore ├── server ├── translations │ ├── en.json │ └── hi.json ├── middlewares │ ├── rateLimiter │ │ └── index.js │ ├── auth │ │ ├── utils.js │ │ ├── ownershipBasedAccessControl.js │ │ └── index.js │ └── injectRequestId │ │ └── index.js ├── services │ ├── redis.js │ └── circuitBreaker.js ├── utils │ ├── random.js │ ├── logger.js │ ├── i18n │ │ └── message.js │ ├── mongoConstants.js │ ├── slackNotify.js │ ├── constants.js │ ├── custom │ │ └── scheduleJob.js │ ├── auth0.js │ ├── apiUtils.js │ ├── index.js │ ├── queue.js │ ├── mockData.js │ ├── routeLister.js │ └── swagUtils.js ├── api │ ├── users │ │ ├── index.js │ │ └── validator.js │ ├── unshardedOrders │ │ └── index.js │ ├── products │ │ └── index.js │ ├── orders │ │ ├── validator.js │ │ ├── updateRedis.js │ │ └── index.js │ ├── login │ │ ├── validator.js │ │ └── index.js │ ├── aggregate │ │ └── orders │ │ │ ├── validator.js │ │ │ └── index.js │ ├── cronJob │ │ ├── validator.js │ │ ├── index.js │ │ └── aggregateJob.js │ ├── referencedOrders │ │ └── index.js │ ├── unshardedReferencedOrders │ │ └── index.js │ ├── assignRoles │ │ ├── validator.js │ │ └── index.js │ ├── roles │ │ ├── validator.js │ │ └── index.js │ ├── routes │ │ └── index.js │ ├── index.js │ ├── utils.js │ ├── customApisMapper.js │ └── requestGenerators.js ├── database │ ├── models │ │ ├── suppliers.js │ │ ├── referencedOrders.js │ │ ├── unshardedOrders.js │ │ ├── stores.js │ │ ├── orders.js │ │ ├── unshardedReferencedOrders.js │ │ ├── products.js │ │ ├── storeProducts.js │ │ ├── supplierProducts.js │ │ └── users.js │ └── mongo.js ├── daos │ ├── product.js │ └── order.js └── index.js ├── .travis.yml ├── __tests__ ├── __load__ │ ├── libs │ │ ├── shim │ │ │ ├── urijs.js │ │ │ ├── lodash.js │ │ │ ├── cheerio.js │ │ │ ├── crypto-js.js │ │ │ ├── full.js │ │ │ ├── expect.js │ │ │ ├── xml2Json.js │ │ │ └── jsonSchema.js │ │ └── spo-gpo.js │ └── script.js └── server │ ├── utils │ ├── swagUtils.test.js │ ├── slackNotify.test.js │ ├── 
custom │ │ └── scheduleJob.test.js │ ├── index.test.js │ └── queue.test.js │ ├── index.test.js │ ├── middlewares │ ├── rateLimiter │ │ └── index.test.js │ └── auth │ │ ├── index.test.js │ │ ├── ownershipBasedAccessControl.test.js │ │ └── paths.test.js │ ├── api │ ├── products │ │ └── index.test.js │ ├── cronJob │ │ ├── index.test.js │ │ └── aggregateJob.test.js │ ├── login │ │ └── index.test.js │ ├── referencedOrders │ │ └── index.test.js │ ├── unshardedReferencedOrders │ │ └── index.test.js │ ├── roles │ │ └── index.test.js │ ├── assignRoles │ │ └── index.test.js │ ├── users │ │ └── index.test.js │ └── aggregate │ │ └── orders │ │ └── index.test.js │ ├── database │ └── mongo.test.js │ ├── services │ └── circuitBreaker.test.js │ └── daos │ ├── product.test.js │ └── order.test.js ├── .babelrc ├── .prettierrc ├── seeders ├── seed.sh ├── unsharded.js ├── unshardedReferenced.js ├── referenced.js ├── utils.js └── index.js ├── setup-shards ├── scripts │ ├── setup │ │ ├── cleanup-files.sh │ │ ├── base.sh │ │ ├── config-server.sh │ │ ├── shard1.sh │ │ ├── shard2.sh │ │ ├── shard3.sh │ │ ├── shard4.sh │ │ ├── docker-containers.sh │ │ └── create-sharded-collections.sh │ └── teardown.sh ├── mongos │ └── docker-compose.yml ├── shardsvr1 │ └── docker-compose.yml ├── shardsvr2 │ └── docker-compose.yml ├── shardsvr3 │ └── docker-compose.yml ├── shardsvr4 │ └── docker-compose.yml ├── docker-compose.yml ├── configsvr │ └── docker-compose.yml └── README.md ├── nodemon.json ├── makefile ├── webpack ├── production.config.js ├── dev.config.js └── server.config.js ├── __mocks__ └── bull.js ├── .github ├── pull_request_template.md └── workflows │ ├── ci.yml │ ├── coverage-report-ci.yml │ ├── cd.yml │ ├── prod-release.yml │ └── beta-release.yml ├── postman └── parcel-node-mongo.postman_environment.json ├── Dockerfile ├── config └── index.js ├── jest.setup.js ├── docker-compose.yml ├── .env.example ├── .env.test ├── jest.config.json ├── Licence ├── badges ├── badge-lines.svg ├── 
badge-branches.svg ├── badge-functions.svg └── badge-statements.svg ├── .eslintignore ├── .gitignore ├── .eslintrc.js ├── package.json └── README.md /.nvmrc: -------------------------------------------------------------------------------- 1 | v14.17 2 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules -------------------------------------------------------------------------------- /server/translations/en.json: -------------------------------------------------------------------------------- 1 | { 2 | "response.health_check": "node-express-mongo server at your service🖖" 3 | } 4 | -------------------------------------------------------------------------------- /server/translations/hi.json: -------------------------------------------------------------------------------- 1 | { 2 | "response.health_check": "नोड-एक्सप्रेस-मोंगो सर्वर आपकी सेवा में🖖" 3 | } 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | 3 | node_js: 4 | - "8" 5 | 6 | install: 7 | - npm install 8 | script: 9 | - npm run test -------------------------------------------------------------------------------- /server/middlewares/rateLimiter/index.js: -------------------------------------------------------------------------------- 1 | import rateLimit from 'express-rate-limit'; 2 | 3 | export const rateLimiter = options => rateLimit(options); 4 | -------------------------------------------------------------------------------- /server/services/redis.js: -------------------------------------------------------------------------------- 1 | import Redis from 'ioredis'; 2 | 3 | export const redis = new Redis( 4 | process.env.REDIS_PORT, 5 | process.env.REDIS_DOMAIN 6 | ); 7 | 
-------------------------------------------------------------------------------- /__tests__/__load__/libs/shim/urijs.js: -------------------------------------------------------------------------------- 1 | /* global postman */ 2 | 3 | import URI from '../urijs.js'; 4 | 5 | const Extend = Symbol.for('extend'); 6 | 7 | postman[Extend].module.urijs = URI; 8 | -------------------------------------------------------------------------------- /__tests__/__load__/libs/shim/lodash.js: -------------------------------------------------------------------------------- 1 | /* global postman */ 2 | 3 | import lodash from '../lodash.js'; 4 | 5 | const Extend = Symbol.for('extend'); 6 | 7 | postman[Extend].module.lodash = lodash; 8 | -------------------------------------------------------------------------------- /__tests__/__load__/libs/shim/cheerio.js: -------------------------------------------------------------------------------- 1 | /* global postman */ 2 | 3 | import cheerio from '../cheerio.js'; 4 | 5 | const Extend = Symbol.for('extend'); 6 | 7 | postman[Extend].module.cheerio = cheerio; 8 | -------------------------------------------------------------------------------- /__tests__/__load__/libs/shim/crypto-js.js: -------------------------------------------------------------------------------- 1 | /* global postman */ 2 | 3 | import cryptoJs from '../crypto-js.js'; 4 | 5 | const Extend = Symbol.for('extend'); 6 | 7 | postman[Extend].module['crypto-js'] = cryptoJs; 8 | -------------------------------------------------------------------------------- /__tests__/__load__/libs/shim/full.js: -------------------------------------------------------------------------------- 1 | import './core'; 2 | import './cheerio'; 3 | import './crypto-js.js'; 4 | import './expect.js'; 5 | import './jsonSchema.js'; 6 | import './lodash.js'; 7 | import './xml2Json'; 8 | -------------------------------------------------------------------------------- /server/utils/random.js: 
/**
 * Returns `numberOfElements` items picked uniformly at random from `arr`.
 *
 * Uses a Fisher–Yates shuffle on a copy of the input. The previous
 * `arr.sort(() => 0.5 - Math.random())` trick is statistically biased
 * (a random comparator violates the sort contract) and mutated the
 * caller's array in place; both problems are fixed here.
 *
 * @param {Array} arr - pool of candidate elements (left untouched)
 * @param {number} numberOfElements - how many elements to pick; when larger
 *        than `arr.length` the whole shuffled pool is returned
 * @returns {Array} a new array containing the selected elements
 */
export function randomiser(arr, numberOfElements) {
    const shuffled = [...arr];
    // Fisher–Yates: swap each slot with a uniformly chosen index <= it.
    for (let i = shuffled.length - 1; i > 0; i--) {
        const j = Math.floor(Math.random() * (i + 1));
        [shuffled[i], shuffled[j]] = [shuffled[j], shuffled[i]];
    }
    return shuffled.slice(0, numberOfElements);
}
/**
 * Fetches every product category via the product DAO.
 *
 * The previous `try { ... } catch (err) { throw err; }` wrapper was a no-op
 * (catch-and-rethrow adds nothing), so the DAO call's promise is returned
 * directly; rejections still propagate unchanged to the caller.
 *
 * @returns {Promise<Array>} resolves with whatever `getAllCategories` returns
 */
export const getCategories = () => getAllCategories();
/**
 * Query-string options appended to every Mongo URI.
 * Routes reads to secondary replica-set members.
 */
const getMongoOptions = () => '?readPreference=secondary';

/**
 * Builds the MongoDB connection URI from the environment.
 *
 * Reads MONGO_BASE_URI, MONGO_PORT and MONGO_DB_NAME from `process.env`
 * at call time and appends the standard read-preference options.
 *
 * @returns {string} full mongodb:// connection URI
 */
function getMongoUri() {
    const { MONGO_BASE_URI, MONGO_PORT, MONGO_DB_NAME } = process.env;
    const options = getMongoOptions();
    return `mongodb://${MONGO_BASE_URI}:${MONGO_PORT}/${MONGO_DB_NAME}${options}`;
}

module.exports = { getMongoUri };
# Rebuild and restart the local stack: stop running containers, rebuild
# images, then bring everything up with the docker env file.
# Fix: the compose file was previously passed twice per command
# (`-f docker-compose.yml -f docker-compose.yml`); merging a file with
# itself is a no-op, so the duplicate flag is dropped.
docker:
	docker-compose --env-file ./.env.docker \
		-f docker-compose.yml down

	docker-compose --env-file ./.env.docker \
		-f docker-compose.yml build

	docker-compose --env-file ./.env.docker \
		-f docker-compose.yml up
/**
 * Manual Jest mock for the `bull` queue library.
 *
 * Named `bullMockQueue` (the original exported an anonymous function) so the
 * mock appears by name in stack traces and debugger output; consumers that
 * `import Queue from 'bull'` are unaffected by the added name.
 *
 * @param {*} msg - value stored on the mock queue's `data` field
 * @returns {{data: *, process: Function, add: Function}} minimal queue stub:
 *          `process(fn)` immediately invokes `fn` with a canned job and a
 *          `done` callback; `add(name, repeat)` resolves with `name && repeat`.
 */
export default function bullMockQueue(msg) {
    // `done` callback handed to the processor, mirroring bull's API shape.
    const done = () => {
        console.log('done');
    };
    // Canned job payload passed to every registered processor.
    const job = {
        id: 1,
        data: {
            message: 'This is a sample job'
        }
    };
    return {
        data: msg,
        process: fn => fn(job, done),
        add: (name, repeat) => Promise.resolve(name && repeat)
    };
}
setup-shards/configsvr/docker-compose.yml down 4 | docker-compose -f setup-shards/shardsvr1/docker-compose.yml down 5 | docker-compose -f setup-shards/mongos/docker-compose.yml down 6 | docker-compose -f setup-shards/shardsvr2/docker-compose.yml down 7 | docker-compose -f setup-shards/shardsvr3/docker-compose.yml down 8 | docker-compose -f setup-shards/shardsvr4/docker-compose.yml down -------------------------------------------------------------------------------- /server/utils/constants.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | // This date indicates when the mutations on createPurchasedProduct went live. We will not have to recalculate aggregate from database after this date. 3 | REDIS_IMPLEMENTATION_DATE: '2022-04-05T00:00:00.000Z', 4 | SCOPE_TYPE: { 5 | ADMIN: 'ADMIN', 6 | SUPER_ADMIN: 'SUPER_ADMIN', 7 | STORE_ADMIN: 'STORE_ADMIN', 8 | SUPPLIER_ADMIN: 'SUPPLIER_ADMIN' 9 | } 10 | }; 11 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ### Ticket Link 2 | 3 | --- 4 | 5 | ### Related Links 6 | 7 | --- 8 | 9 | ### Description 10 | 11 | --- 12 | 13 | ### Steps to Reproduce / Test 14 | 15 | --- 16 | 17 | --- 18 | 19 | ### Checklist 20 | 21 | - [ ] PR description included 22 | - [ ] `yarn test` passes 23 | - [ ] Tests are [changed or added] 24 | - [ ] Relevant documentation is changed or added (and PR referenced) 25 | 26 | ### GIF's 27 | 28 | --- 29 | -------------------------------------------------------------------------------- /server/api/aggregate/orders/validator.js: -------------------------------------------------------------------------------- 1 | const { checkSchema } = require('express-validator'); 2 | export default checkSchema({ 3 | date: { 4 | in: ['query'], 5 | isISO8601: { 6 | errorMessage: 'Add a valid date' 7 | } 8 | }, 9 | 
/**
 * Wraps the shared bunyan logger in a child logger that stamps every record
 * with the given requestId.
 *
 * NOTE(review): `injectRequestId` below assigns the child logger to
 * `global.log`, which is process-wide, not request-scoped — under concurrent
 * requests, log lines from one request can be stamped with another request's
 * id. Consider attaching the logger to `response.locals` instead; confirm
 * before relying on the ids for tracing.
 */
function RequestIdInjectedLogger(options, requestId) {
    // bunyan child logger: inherits streams/level, adds { requestId } to each record.
    this.log = options.log.child({ requestId });
}

/**
 * Express middleware factory: generates a fresh UUID per request and swaps in
 * a request-id-tagged logger (see NOTE above about the global swap).
 */
export const injectRequestId = () => (_request, _response, next) => {
    const requestId = uuid();
    const logger = new RequestIdInjectedLogger({ log }, requestId);
    global.log = logger.log;
    next();
};
const { checkSchema } = require('express-validator');

/**
 * Request-body validation schema for the cron-job scheduling endpoint.
 *
 * Fix: `queueName` previously carried only an `errorMessage` with no
 * validator key, so express-validator never actually checked it and any
 * request passed. `exists` + `isString` now enforce that a queue name is
 * supplied, in line with the other two fields.
 */
export default checkSchema({
    scheduleIn: {
        in: ['body'],
        errorMessage: 'scheduleIn must be present',
        isNumeric: true
    },
    message: {
        in: ['body'],
        errorMessage: 'message must be present',
        isString: true
    },
    queueName: {
        in: ['body'],
        errorMessage: 'Queue name is required',
        // Previously unvalidated: without these keys no rule ever ran.
        exists: true,
        isString: true
    }
});
/**
 * Mounts the referenced-orders listing endpoint on the given router.
 *
 * Delegates to `fetchAllPurchasedProducts` with the request's query params
 * and wraps the outcome in the shared apiSuccess/apiFailure envelopes.
 *
 * @param {object} router - Express router to mount on
 * @param {object} model - mongoose model passed through to the fetch helper
 * @param {object} _validator - unused; kept for signature parity with siblings
 */
export const fetchAllReferencedOrders = (router, model, _validator) => {
    const listHandler = async (request, response) => {
        try {
            const purchasedProducts = await fetchAllPurchasedProducts(
                model,
                request.query
            );
            return apiSuccess(response, purchasedProducts);
        } catch (error) {
            return apiFailure(response, error.message);
        }
    };
    router.use('/', listHandler);
};
/**
 * Mounts the unsharded-referenced-orders listing endpoint.
 *
 * Fix: parameters renamed from the misleading `(app, model, name)` to
 * `(router, model, _validator)` for consistency with the sibling order
 * modules — arguments are positional, so callers are unaffected. The unused
 * `name`/`next` identifiers are marked/dropped accordingly.
 *
 * @param {object} router - Express router to mount on
 * @param {object} model - mongoose model passed to the fetch helper
 * @param {object} _validator - unused; kept for signature parity
 */
export const fetchAllUnshardedReferencedOrders = (router, model, _validator) => {
    router.use('/', async (req, res) => {
        try {
            const items = await fetchAllPurchasedProducts(model, req.query);
            return apiSuccess(res, items);
        } catch (err) {
            return apiFailure(res, err.message);
        }
    });
};
const mongoose = require('mongoose');

/**
 * Product catalogue model.
 *
 * Fix: `new` added before `mongoose.Schema` — Schema tolerates being called
 * without `new`, but every other model in this codebase constructs it
 * explicitly, so this is brought in line for consistency.
 */
const schema = new mongoose.Schema({
    name: {
        type: String,
        required: true
    },
    price: {
        type: Number,
        required: true,
        min: 0 // prices may not be negative
    },
    category: {
        type: String,
        required: true
    },
    quantity: {
        type: Number // optional stock count
    }
});

const Products = mongoose.model('products', schema);

module.exports = { model: Products, Products, schema };
sh.addShard(\"shard1rs/$ip:50001,$ip:50002,$ip:50003\") 22 | " > mongos.txt 23 | 24 | mongosh mongodb://$ip:60000 < mongos.txt 25 | -------------------------------------------------------------------------------- /setup-shards/scripts/setup/shard2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "shard2------------------------" 4 | 5 | echo " 6 | rs.initiate( 7 | { 8 | _id: \"shard2rs\", 9 | members: 10 | [ 11 | { _id : 0, host : \"$ip:50004\" }, 12 | { _id : 1, host : \"$ip:50005\" }, 13 | { _id : 2, host : \"$ip:50006\" } 14 | ] 15 | } 16 | )" > shard2rs.txt 17 | 18 | mongosh mongodb://$ip:50004 < shard2rs.txt 19 | 20 | echo " 21 | sh.addShard(\"shard2rs/$ip:50004,$ip:50005,$ip:50006\") 22 | " > mongos.txt 23 | 24 | mongosh mongodb://$ip:60000 < mongos.txt 25 | -------------------------------------------------------------------------------- /setup-shards/scripts/setup/shard3.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "shard3------------------------" 4 | 5 | echo " 6 | rs.initiate( 7 | { 8 | _id: \"shard3rs\", 9 | members: 10 | [ 11 | { _id : 0, host : \"$ip:50007\" }, 12 | { _id : 1, host : \"$ip:50008\" }, 13 | { _id : 2, host : \"$ip:50009\" } 14 | ] 15 | } 16 | )" > shard3rs.txt 17 | 18 | mongosh mongodb://$ip:50007 < shard3rs.txt 19 | 20 | 21 | echo " 22 | sh.addShard(\"shard3rs/$ip:50007,$ip:50008,$ip:50009\") 23 | " > mongos.txt 24 | 25 | mongosh mongodb://$ip:60000 < mongos.txt -------------------------------------------------------------------------------- /setup-shards/scripts/setup/shard4.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "shard4------------------------" 4 | 5 | echo " 6 | rs.initiate( 7 | { 8 | _id: \"shard4rs\", 9 | members: 10 | [ 11 | { _id : 0, host : \"$ip:50010\" }, 12 | { _id : 1, host : \"$ip:50011\" }, 13 | { _id : 2, host : 
\"$ip:50012\" } 14 | ] 15 | } 16 | )" > shard4rs.txt 17 | 18 | mongosh mongodb://$ip:50010 < shard4rs.txt 19 | 20 | 21 | echo " 22 | sh.addShard(\"shard4rs/$ip:50010,$ip:50011,$ip:50012\") 23 | " > mongos.txt 24 | 25 | mongosh mongodb://$ip:60000 < mongos.txt -------------------------------------------------------------------------------- /server/utils/custom/scheduleJob.js: -------------------------------------------------------------------------------- 1 | import moment from 'moment'; 2 | import { getQueue } from '../queue'; 3 | 4 | export const scheduleJob = (scheduleIn, message, queueName) => 5 | getQueue(queueName) 6 | .add({ message: message }, { delay: scheduleIn }) 7 | .then(job => { 8 | console.log( 9 | `${moment()}::Job with id: ${ 10 | job.id 11 | } scheduled in ${scheduleIn} milliseconds` 12 | ); 13 | return { success: true }; 14 | }) 15 | .catch(err => ({ success: false })); 16 | -------------------------------------------------------------------------------- /setup-shards/scripts/setup/docker-containers.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "docker containers------------------------" 4 | # setup env 5 | echo "ip=$ip" > setup-shards/mongos/.env 6 | 7 | docker-compose -f setup-shards/configsvr/docker-compose.yml up -d 8 | docker-compose -f setup-shards/shardsvr1/docker-compose.yml up -d 9 | docker-compose -f setup-shards/mongos/docker-compose.yml up -d 10 | docker-compose -f setup-shards/shardsvr2/docker-compose.yml up -d 11 | docker-compose -f setup-shards/shardsvr3/docker-compose.yml up -d 12 | docker-compose -f setup-shards/shardsvr4/docker-compose.yml up -d 13 | sleep 20 -------------------------------------------------------------------------------- /config/index.js: -------------------------------------------------------------------------------- 1 | const envFile = `.env.${process.env.ENVIRONMENT_NAME}`; 2 | require('dotenv').config({ 3 | path: envFile 4 | }); 5 | 6 | 
module.exports = () => ({ 7 | domain: process.env.DOMAIN, 8 | clientId: process.env.CLIENT_ID, 9 | clientSecret: process.env.CLIENT_SECRET, 10 | audience: process.env.AUDIENCE, 11 | grantType: process.env.GRANT_TYPE, 12 | connection: process.env.CONNECTION, 13 | frontendClientId: process.env.FRONTEND_CLIENT_ID, 14 | frontendGrantType: process.env.FRONTEND_GRANT_TYPE, 15 | apiAudience: process.env.API_AUDIENCE, 16 | memory: true 17 | }); 18 | -------------------------------------------------------------------------------- /__tests__/__load__/libs/shim/jsonSchema.js: -------------------------------------------------------------------------------- 1 | /* global postman */ 2 | 3 | import Ajv from '../ajv.js'; 4 | 5 | const Extend = Symbol.for('extend'); 6 | 7 | Object.assign(postman[Extend], { 8 | jsonSchema(store, schema, options) { 9 | const ajv = new Ajv(options); 10 | const validate = ajv.compile(schema); 11 | store.test.push(response => validate(store.response.body.json)); 12 | }, 13 | 14 | jsonSchemaNot(store, schema, options) { 15 | const ajv = new Ajv(options); 16 | const validate = ajv.compile(schema); 17 | store.test.push(response => !validate(store.response.body.json)); 18 | }, 19 | }); 20 | -------------------------------------------------------------------------------- /server/database/models/storeProducts.js: -------------------------------------------------------------------------------- 1 | const mongoose = require('mongoose'); 2 | const { schema: Store } = require('./stores'); 3 | const { schema: Product } = require('./products'); 4 | const schema = new mongoose.Schema({ 5 | productId: { 6 | type: mongoose.Types.ObjectId, 7 | required: true 8 | }, 9 | storeId: { 10 | type: mongoose.Types.ObjectId, 11 | required: true 12 | }, 13 | store: Store, 14 | product: Product 15 | }); 16 | 17 | const StoreProducts = mongoose.model('storeProducts', schema); 18 | module.exports = { model: StoreProducts, StoreProducts, schema }; 19 | 
-------------------------------------------------------------------------------- /__tests__/server/middlewares/rateLimiter/index.test.js: -------------------------------------------------------------------------------- 1 | jest.mock('express-rate-limit'); 2 | 3 | describe('rateLimiter test', () => { 4 | it('should call rateLimiter', async () => { 5 | let options = { 6 | windowMs: 15 * 60 * 1000, 7 | max: 100, 8 | standardHeaders: true, 9 | legacyHeaders: false 10 | }; 11 | const limiter = require('express-rate-limit'); 12 | const { rateLimiter } = require('middlewares/rateLimiter'); 13 | rateLimiter(options); 14 | expect(limiter).toBeCalledWith(expect.objectContaining(options)); 15 | }); 16 | }); 17 | -------------------------------------------------------------------------------- /server/database/models/supplierProducts.js: -------------------------------------------------------------------------------- 1 | const mongoose = require('mongoose'); 2 | const { schema: Supplier } = require('./suppliers'); 3 | const { schema: Product } = require('./products'); 4 | const schema = new mongoose.Schema({ 5 | productId: { 6 | type: mongoose.Types.ObjectId, 7 | required: true 8 | }, 9 | supplierId: { 10 | type: mongoose.Types.ObjectId, 11 | required: true 12 | }, 13 | supplier: Supplier, 14 | product: Product 15 | }); 16 | 17 | const SupplierProducts = mongoose.model('supplierProducts', schema); 18 | module.exports = { model: SupplierProducts, SupplierProducts, schema }; 19 | -------------------------------------------------------------------------------- /jest.setup.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config({ 2 | path: `.env.test` 3 | }); 4 | process.env.ENVIRONMENT_NAME = 'test'; 5 | beforeEach(() => { 6 | process.env = { ...process.env, ENVIRONMENT_NAME: 'test' }; 7 | }); 8 | 9 | afterEach(() => { 10 | jest.clearAllMocks(); 11 | jest.resetAllMocks(); 12 | jest.resetModules(); 13 | }); 14 | 15 | 
jest.doMock('ioredis', () => 16 | jest.fn().mockImplementation(() => ({ 17 | publish: () => ({}), 18 | set: msg => 19 | JSON.stringify({ 20 | msg 21 | }), 22 | get: msg => 23 | JSON.stringify({ 24 | msg 25 | }) 26 | })) 27 | ); 28 | -------------------------------------------------------------------------------- /server/database/models/users.js: -------------------------------------------------------------------------------- 1 | const mongoose = require('mongoose'); 2 | const constants = require('utils/constants'); 3 | 4 | const schema = new mongoose.Schema({ 5 | firstName: { 6 | type: String, 7 | required: true 8 | }, 9 | lastName: { 10 | type: String, 11 | required: true 12 | }, 13 | email: { 14 | type: String, 15 | required: true 16 | }, 17 | authId: { 18 | type: String 19 | }, 20 | role: { 21 | type: String, 22 | enum: Object.values(constants.SCOPE_TYPE) 23 | } 24 | }); 25 | 26 | const Users = mongoose.model('users', schema); 27 | module.exports = { model: Users, Users, schema }; 28 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | app: 4 | build: 5 | context: . 
6 | args: 7 | ENVIRONMENT_NAME: ${ENVIRONMENT_NAME} 8 | dockerfile: Dockerfile 9 | ports: 10 | - "9000:9000" 11 | env_file: 12 | - .env.local 13 | environment: 14 | - REDIS_DOMAIN=redis 15 | - MONGO_BASE_URI=mongo 16 | depends_on: 17 | - mongo 18 | 19 | mongo: 20 | container_name: mongo 21 | image: mongo 22 | ports: 23 | - "27017:27017" 24 | 25 | redis: 26 | depends_on: 27 | - mongo 28 | image: "redis:alpine" 29 | ports: 30 | - "6379:6379" 31 | command: ["redis-server", "--bind", "redis", "--port", "6379"] -------------------------------------------------------------------------------- /server/daos/product.js: -------------------------------------------------------------------------------- 1 | import { Products } from 'database/models/products'; 2 | import { redis } from 'services/redis'; 3 | 4 | export const getAllCategories = async () => { 5 | try { 6 | const categoriesFromRedis = await redis.get('categories'); 7 | let categories; 8 | if (!categoriesFromRedis) { 9 | const allCategories = await Products.distinct('category'); 10 | redis.set('categories', JSON.stringify(allCategories)); 11 | categories = allCategories; 12 | } else { 13 | categories = JSON.parse(categoriesFromRedis); 14 | } 15 | return categories; 16 | } catch (error) { 17 | throw error; 18 | } 19 | }; 20 | -------------------------------------------------------------------------------- /server/services/circuitBreaker.js: -------------------------------------------------------------------------------- 1 | import CircuitBreaker from 'opossum'; 2 | import log from 'utils/logger'; 3 | 4 | const options = { 5 | timeout: 3000, 6 | errorThresholdPercentage: 50, 7 | resetTimeout: 30000 8 | }; 9 | 10 | export const newCircuitBreaker = (func, fallbackMsg) => { 11 | const breaker = new CircuitBreaker(func, options); 12 | breaker.fallback((params, err) => { 13 | log.error( 14 | 'fallbackMsg:', 15 | fallbackMsg, 16 | 'params: ', 17 | params, 18 | 'error:', 19 | err.message 20 | ); 21 | return `${fallbackMsg}. 
${err.message || err}`; 22 | }); 23 | return breaker; 24 | }; 25 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | SLACK_WEBHOOK_URL=https://hooks.slack.com/services/YOUR/SLACK/WEBHOOK 2 | # AUTH0 MANAGEMENT SDK 3 | DOMAIN=devb0.us.auth0.com 4 | CLIENT_ID=oN2fwnJrkOkyXNoRyHNiFq 5 | CLIENT_SECRET=nZh5MDB9dd0sjkHJNKn6hVxifnlCrYJF-dqk2Nm 6 | AUDIENCE=https://devb0.us.auth0.com/api/v2/ 7 | GRANT_TYPE=client_credentials 8 | CONNECTION=Username-Password-Authentication 9 | SECRET=5qwBKm4MiKMTPzr0a 10 | 11 | # AUTHO FRONTEND 12 | FRONTEND_CLIENT_ID=bcJD0I08jykjwYBcxBGy 13 | FRONTEND_GRANT_TYPE=http://auth0.com/oauth/grant-type/password-realm 14 | 15 | # AUTH0 api 16 | API_AUDIENCE=https://node-express-demo 17 | 18 | REDIS_PORT=6379 19 | REDIS_DOMAIN=localhost 20 | MONGO_PORT=27017 21 | MONGO_BASE_URI=localhost 22 | MONGO_DB_NAME=ecommerce -------------------------------------------------------------------------------- /__tests__/server/utils/slackNotify.test.js: -------------------------------------------------------------------------------- 1 | describe('slackNotify tests', () => { 2 | const sendSpy = jest.fn(); 3 | jest.doMock('slack-notify', () => () => ({ 4 | send: sendSpy 5 | })); 6 | it('should call notifySlack function', async () => { 7 | const { notifySlack } = require('utils/slackNotify'); 8 | 9 | let username = 'doe@wednesday.is'; 10 | let message = 'Work done!'; 11 | notifySlack(username, message); 12 | expect(sendSpy).toBeCalledWith( 13 | expect.objectContaining({ 14 | text: JSON.stringify(message), 15 | username 16 | }) 17 | ); 18 | notifySlack(username, message); 19 | }); 20 | }); 21 | -------------------------------------------------------------------------------- /.env.test: -------------------------------------------------------------------------------- 1 | SLACK_WEBHOOK_URL=https://hooks.slack.com/services/YOUR/SLACK/WEBHOOK 
2 | # AUTH0 MANAGEMENT SDK 3 | DOMAIN=devb0.us.auth0.com 4 | CLIENT_ID=oN2fwnJrkOkyXNoRyHNiFq 5 | CLIENT_SECRET=nZh5MDB9dd0sjkHJNKn6hVxifnlCrYJF-dqk2Nm 6 | AUDIENCE=https://devb0.us.auth0.com/api/v2/ 7 | GRANT_TYPE=client_credentials 8 | CONNECTION=Username-Password-Authentication 9 | SECRET=5qwBKm4MiKMTPzr0a 10 | 11 | # AUTHO FRONTEND 12 | FRONTEND_CLIENT_ID=bcJD0I08jykjwYBcxBGy 13 | FRONTEND_GRANT_TYPE=http://auth0.com/oauth/grant-type/password-realm 14 | 15 | # AUTH0 api 16 | API_AUDIENCE=https://express-demo 17 | 18 | ENVIRONMENT_NAME=test 19 | NODE_ENV=test 20 | REDIS_PORT=6379 21 | REDIS_DOMAIN=localhost 22 | MONGO_PORT=27017 23 | MONGO_BASE_URI=localhost 24 | MONGO_DB_NAME=ecommerce -------------------------------------------------------------------------------- /server/database/mongo.js: -------------------------------------------------------------------------------- 1 | import mongoose from 'mongoose'; 2 | import { getMongoUri } from 'utils/mongoConstants.js'; 3 | import log from 'utils/logger'; 4 | 5 | export const mongoConnector = () => { 6 | let db; 7 | 8 | if (mongoose.connection.readyState === 1) { 9 | log.info('Mongo already connected.'); 10 | db = mongoose.connection; 11 | } else { 12 | mongoose.connect(getMongoUri()); 13 | db = mongoose.connection; 14 | db.on('error', err => { 15 | log.error('error', err); 16 | }); 17 | db.once('open', () => 18 | log.info( 19 | 'mongo connection successfully connected to ', 20 | getMongoUri() 21 | ) 22 | ); 23 | } 24 | return db; 25 | }; 26 | -------------------------------------------------------------------------------- /seeders/unsharded.js: -------------------------------------------------------------------------------- 1 | const { 2 | runSeeders, 3 | connectToMongo, 4 | createOrder, 5 | createProduct 6 | } = require('./utils'); 7 | 8 | function seed() { 9 | connectToMongo() 10 | .then(async () => { 11 | console.log('connected to mongodb::unsharded'); 12 | const divisor = process.env.DIVISOR || 100; 13 | for 
(let i = 0; i < 5000 / divisor; i++) { 14 | const products = []; 15 | for (let j = 0; j < 3; j++) { 16 | products.push(await createProduct()); 17 | } 18 | await createOrder(products); 19 | } 20 | }) 21 | .catch(err => { 22 | console.log('Error is ', err); 23 | }); 24 | } 25 | 26 | runSeeders(seed); 27 | -------------------------------------------------------------------------------- /server/api/cronJob/index.js: -------------------------------------------------------------------------------- 1 | import { validationResult } from 'express-validator'; 2 | import { apiFailure, apiSuccess } from 'utils/apiUtils'; 3 | import { scheduleJob } from 'utils/custom/scheduleJob'; 4 | import cronJobValidator from './validator'; 5 | 6 | const cronJob = async (req, res, next) => { 7 | try { 8 | const errors = validationResult(req); 9 | if (!errors.isEmpty()) { 10 | throw { message: errors.errors[0].msg }; 11 | } 12 | const { scheduleIn, message, queueName } = req.body; 13 | const data = await scheduleJob(scheduleIn, message, queueName); 14 | return apiSuccess(res, data); 15 | } catch (err) { 16 | return apiFailure(res, err.message); 17 | } 18 | }; 19 | 20 | export { cronJob, cronJobValidator }; 21 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Node Mongo Express CI 2 | 3 | on: 4 | pull_request_target: 5 | branches: [main] 6 | 7 | jobs: 8 | build_and_test: 9 | name: Build & Test 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | node-version: [14.17.x] 14 | steps: 15 | - uses: actions/checkout@v2 16 | - name: Setup environment 17 | uses: actions/setup-node@v2 18 | with: 19 | cache: 'yarn' 20 | node-version: ${{ matrix.node-version }} 21 | - name: Install dependencies 22 | run: yarn install 23 | - name: Lint 24 | run: yarn lint 25 | - name: Test 26 | run: yarn test 27 | 
-------------------------------------------------------------------------------- /server/utils/auth0.js: -------------------------------------------------------------------------------- 1 | import { AuthenticationClient, ManagementClient } from 'auth0'; 2 | import config from 'config'; 3 | 4 | let client; 5 | 6 | export const auth0 = () => { 7 | if (!client) { 8 | client = new AuthenticationClient({ 9 | domain: config().domain, 10 | clientId: config().clientId, 11 | clientSecret: config().clientSecret 12 | }); 13 | } 14 | return client; 15 | }; 16 | 17 | export const clientCredentialsGrant = () => 18 | auth0().clientCredentialsGrant({ 19 | audience: config().audience, 20 | grant_type: config().grantType 21 | }); 22 | 23 | export const managementClient = auth => 24 | new ManagementClient({ 25 | token: auth.access_token, 26 | domain: config().domain 27 | }); 28 | -------------------------------------------------------------------------------- /server/api/assignRoles/validator.js: -------------------------------------------------------------------------------- 1 | import { checkSchema } from 'express-validator'; 2 | import { SCOPE_TYPE } from 'utils/constants'; 3 | 4 | export default checkSchema({ 5 | authId: { 6 | in: ['body'], 7 | errorMessage: 'user auth0 id must be present', 8 | isString: true 9 | }, 10 | role: { 11 | in: ['body'], 12 | errorMessage: 'role must be present', 13 | isIn: { 14 | options: [ 15 | [ 16 | SCOPE_TYPE.SUPER_ADMIN, 17 | SCOPE_TYPE.ADMIN, 18 | SCOPE_TYPE.STORE_ADMIN, 19 | SCOPE_TYPE.SUPPLIER_ADMIN 20 | ] 21 | ], 22 | errorMessage: 'Invalid role.' 
23 | }, 24 | isArray: true 25 | } 26 | }); 27 | -------------------------------------------------------------------------------- /server/api/roles/validator.js: -------------------------------------------------------------------------------- 1 | import { checkSchema } from 'express-validator'; 2 | import { SCOPE_TYPE } from 'utils/constants'; 3 | 4 | export default checkSchema({ 5 | name: { 6 | in: ['body'], 7 | errorMessage: 'name must be present', 8 | isIn: { 9 | options: [ 10 | [ 11 | SCOPE_TYPE.SUPER_ADMIN, 12 | SCOPE_TYPE.ADMIN, 13 | SCOPE_TYPE.STORE_ADMIN, 14 | SCOPE_TYPE.SUPPLIER_ADMIN 15 | ] 16 | ], 17 | errorMessage: 'Invalid role name.' 18 | }, 19 | isString: true 20 | }, 21 | description: { 22 | in: ['body'], 23 | errorMessage: 'description must be present', 24 | isString: true 25 | } 26 | }); 27 | -------------------------------------------------------------------------------- /webpack/dev.config.js: -------------------------------------------------------------------------------- 1 | /** 2 | * DEVELOPMENT WEBPACK CONFIGURATION 3 | */ 4 | 5 | const path = require('path'); 6 | const webpack = require('webpack'); 7 | 8 | module.exports = require('./server.config')({ 9 | mode: 'development', 10 | // Add hot reloading in development 11 | entry: [ 12 | 'webpack-hot-middleware/client?reload=true', 13 | path.join(process.cwd(), '/server/index.js') 14 | ], 15 | // Don't use hashes in dev mode for better performance 16 | babelQuery: { 17 | presets: ['@babel/preset-env'], 18 | plugins: ['@babel/plugin-transform-runtime'] 19 | }, 20 | // Add development plugins 21 | plugins: [ 22 | new webpack.HotModuleReplacementPlugin() // Tell webpack we want hot reloading 23 | ], 24 | optimization: {} 25 | }); 26 | -------------------------------------------------------------------------------- /seeders/unshardedReferenced.js: -------------------------------------------------------------------------------- 1 | const { 2 | runSeeders, 3 | connectToMongo, 4 | createOrder, 5 | 
createProduct 6 | } = require('./utils'); 7 | 8 | function seed() { 9 | connectToMongo() 10 | .then(async () => { 11 | console.log('connected to mongodb::unshardedReferenced'); 12 | const divisor = process.env.DIVISOR || 100; 13 | for (let i = 0; i < 5000 / divisor; i++) { 14 | const products = []; 15 | for (let j = 0; j < 3; j++) { 16 | products.push(await createProduct()); 17 | } 18 | await createOrder(products, false, true); 19 | } 20 | }) 21 | .catch(err => { 22 | console.log('Error is ', err); 23 | }); 24 | } 25 | 26 | runSeeders(seed); 27 | -------------------------------------------------------------------------------- /__tests__/server/utils/custom/scheduleJob.test.js: -------------------------------------------------------------------------------- 1 | import { scheduleJob } from 'utils/custom/scheduleJob'; 2 | import { getQueue } from 'utils/queue'; 3 | 4 | describe('Schedule job tests', () => { 5 | const scheduleIn = 1000; 6 | const message = 'Sample message'; 7 | const queueName = 'sampleQueue'; 8 | it('should schedule a job after the given time', async () => { 9 | const res = await scheduleJob(scheduleIn, message, queueName); 10 | expect(res).toEqual({ success: true }); 11 | }); 12 | it('should return success false in case of an error', async () => { 13 | jest.spyOn(getQueue(queueName), 'add').mockImplementation(() => 14 | Promise.reject('error') 15 | ); 16 | const res = await scheduleJob(scheduleIn, message, queueName); 17 | expect(res).toEqual({ success: false }); 18 | }); 19 | }); 20 | -------------------------------------------------------------------------------- /server/utils/apiUtils.js: -------------------------------------------------------------------------------- 1 | import { validationResult } from 'express-validator'; 2 | 3 | export const apiSuccess = (res, data) => { 4 | log.info('apiSuccess', {}); 5 | return res.send({ data }).status(200); 6 | }; 7 | 8 | export const apiFailure = (res, error, status = 500) => { 9 | log.info('apiFailure', 
{ error }); 10 | return res.status(status).send({ error }); 11 | }; 12 | 13 | export const createValidatorMiddlewares = validator => { 14 | const middlewares = []; 15 | if (validator) { 16 | const checkValidtion = (req, res, next) => { 17 | const errors = validationResult(req); 18 | if (!errors.isEmpty()) { 19 | return apiFailure(res, errors.array(), 400); 20 | } 21 | return next(); 22 | }; 23 | middlewares.push(validator, checkValidtion); 24 | } 25 | return middlewares; 26 | }; 27 | -------------------------------------------------------------------------------- /seeders/referenced.js: -------------------------------------------------------------------------------- 1 | const { 2 | runSeeders, 3 | connectToMongo, 4 | createOrderWithProductReferenced, 5 | createProduct 6 | } = require('./utils'); 7 | 8 | async function seed() { 9 | await Promise.all([ 10 | connectToMongo() 11 | .then(async () => { 12 | const divisor = process.env.DIVISOR || 100; 13 | console.log('connected to mongodb::referenced'); 14 | for (let i = 0; i < 5000 / divisor; i++) { 15 | const products = []; 16 | for (let j = 0; j < 3; j++) { 17 | products.push(await createProduct()); 18 | } 19 | await createOrderWithProductReferenced(products); 20 | } 21 | }) 22 | .catch(err => { 23 | console.log('Error is ', err); 24 | }) 25 | ]); 26 | } 27 | 28 | runSeeders(seed); 29 | -------------------------------------------------------------------------------- /server/api/roles/index.js: -------------------------------------------------------------------------------- 1 | import { validationResult } from 'express-validator'; 2 | import { apiFailure, apiSuccess } from 'utils/apiUtils'; 3 | import { clientCredentialsGrant, managementClient } from 'utils/auth0'; 4 | import roleValidator from './validator'; 5 | 6 | const roles = async (req, res) => { 7 | try { 8 | const errors = validationResult(req); 9 | if (!errors.isEmpty()) { 10 | throw { message: errors.errors[0].msg }; 11 | } 12 | const { name, description } = 
req.body; 13 | const auth = await clientCredentialsGrant(); 14 | const mgmtAuth0 = await managementClient(auth); 15 | const role = await mgmtAuth0.createRole({ 16 | name, 17 | description 18 | }); 19 | return apiSuccess(res, role); 20 | } catch (err) { 21 | return apiFailure(res, err.message); 22 | } 23 | }; 24 | 25 | export { roles, roleValidator }; 26 | -------------------------------------------------------------------------------- /setup-shards/shardsvr1/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | 3 | services: 4 | shard1svr1: 5 | container_name: shard1svr1 6 | restart: on-failure 7 | image: mongo 8 | command: mongod --shardsvr --bind_ip_all --replSet shard1rs --port 27017 --dbpath /data/db 9 | ports: 10 | - 50001:27017 11 | volumes: 12 | - shard1svr1:/data/db 13 | 14 | shard1svr2: 15 | container_name: shard1svr2 16 | image: mongo 17 | command: mongod --shardsvr --bind_ip_all --replSet shard1rs --port 27017 --dbpath /data/db 18 | ports: 19 | - 50002:27017 20 | volumes: 21 | - shard1svr2:/data/db 22 | 23 | shard1svr3: 24 | container_name: shard1svr3 25 | image: mongo 26 | command: mongod --shardsvr --bind_ip_all --replSet shard1rs --port 27017 --dbpath /data/db 27 | ports: 28 | - 50003:27017 29 | volumes: 30 | - shard1svr3:/data/db 31 | 32 | volumes: 33 | shard1svr1: {} 34 | shard1svr2: {} 35 | shard1svr3: {} 36 | -------------------------------------------------------------------------------- /setup-shards/shardsvr2/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | 3 | services: 4 | shard2svr1: 5 | container_name: shard2svr1 6 | restart: on-failure 7 | image: mongo 8 | command: mongod --shardsvr --bind_ip_all --replSet shard2rs --port 27017 --dbpath /data/db 9 | ports: 10 | - 50004:27017 11 | volumes: 12 | - shard2svr1:/data/db 13 | 14 | shard2svr2: 15 | container_name: shard2svr2 16 | image: mongo 17 | 
command: mongod --shardsvr --bind_ip_all --replSet shard2rs --port 27017 --dbpath /data/db 18 | ports: 19 | - 50005:27017 20 | volumes: 21 | - shard2svr2:/data/db 22 | 23 | shard2svr3: 24 | container_name: shard2svr3 25 | image: mongo 26 | command: mongod --shardsvr --bind_ip_all --replSet shard2rs --port 27017 --dbpath /data/db 27 | ports: 28 | - 50006:27017 29 | volumes: 30 | - shard2svr3:/data/db 31 | 32 | volumes: 33 | shard2svr1: {} 34 | shard2svr2: {} 35 | shard2svr3: {} 36 | -------------------------------------------------------------------------------- /setup-shards/shardsvr3/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | 3 | services: 4 | shard3svr1: 5 | container_name: shard3svr1 6 | restart: on-failure 7 | image: mongo 8 | command: mongod --shardsvr --bind_ip_all --replSet shard3rs --port 27017 --dbpath /data/db 9 | ports: 10 | - 50007:27017 11 | volumes: 12 | - shard3svr1:/data/db 13 | 14 | shard3svr2: 15 | container_name: shard3svr2 16 | image: mongo 17 | command: mongod --shardsvr --bind_ip_all --replSet shard3rs --port 27017 --dbpath /data/db 18 | ports: 19 | - 50008:27017 20 | volumes: 21 | - shard3svr2:/data/db 22 | 23 | shard3svr3: 24 | container_name: shard3svr3 25 | image: mongo 26 | command: mongod --shardsvr --bind_ip_all --replSet shard3rs --port 27017 --dbpath /data/db 27 | ports: 28 | - 50009:27017 29 | volumes: 30 | - shard3svr3:/data/db 31 | 32 | volumes: 33 | shard3svr1: {} 34 | shard3svr2: {} 35 | shard3svr3: {} 36 | -------------------------------------------------------------------------------- /setup-shards/shardsvr4/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | 3 | services: 4 | shard4svr1: 5 | container_name: shard4svr1 6 | restart: on-failure 7 | image: mongo 8 | command: mongod --shardsvr --bind_ip_all --replSet shard4rs --port 27017 --dbpath /data/db 9 | ports: 10 | - 
50010:27017 11 | volumes: 12 | - shard4svr1:/data/db 13 | 14 | shard4svr2: 15 | container_name: shard4svr2 16 | image: mongo 17 | command: mongod --shardsvr --bind_ip_all --replSet shard4rs --port 27017 --dbpath /data/db 18 | ports: 19 | - 50011:27017 20 | volumes: 21 | - shard4svr2:/data/db 22 | 23 | shard4svr3: 24 | container_name: shard4svr3 25 | image: mongo 26 | command: mongod --shardsvr --bind_ip_all --replSet shard4rs --port 27017 --dbpath /data/db 27 | ports: 28 | - 50012:27017 29 | volumes: 30 | - shard4svr3:/data/db 31 | 32 | volumes: 33 | shard4svr1: {} 34 | shard4svr2: {} 35 | shard4svr3: {} 36 | -------------------------------------------------------------------------------- /__tests__/server/utils/index.test.js: -------------------------------------------------------------------------------- 1 | import { getModelFiles, isTestEnv } from 'utils/index'; 2 | 3 | describe('isTestEnv tests', () => { 4 | it('should give isTestEnv true if ENVIRONMENT_NAME is test', () => { 5 | process.env.ENVIRONMENT_NAME = 'test'; 6 | expect(isTestEnv()).toBe(true); 7 | }); 8 | it('should give isTestEnv true if NODE_ENV is test', () => { 9 | process.env.ENVIRONMENT_NAME = ''; 10 | process.env.NODE_ENV = 'test'; 11 | expect(isTestEnv()).toBe(true); 12 | }); 13 | 14 | it('should give isTestEnv false if neither NODE_ENV nor ENVIRONMENT_NAME is test', () => { 15 | process.env.ENVIRONMENT_NAME = ''; 16 | process.env.NODE_ENV = ''; 17 | expect(isTestEnv()).toBe(false); 18 | }); 19 | }); 20 | 21 | describe('getModelFiles tests', () => { 22 | it('should throw error when passed in value other than string', () => { 23 | expect(() => getModelFiles(123)).toThrow(); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /__tests__/server/api/products/index.test.js: -------------------------------------------------------------------------------- 1 | import { getCategories } from 'api/products'; 2 | import * as daos from 'daos/product'; 3 | 
import { getCategories } from 'api/products';
import * as daos from 'daos/product';
import { mockData } from 'utils/mockData';

const { MOCK_CATEGORIES: mockCategories } = mockData;

describe('categories functions tests', () => {
  it('should return all the distinct categories', async () => {
    const spy = jest
      .spyOn(daos, 'getAllCategories')
      .mockResolvedValueOnce(mockCategories);
    const res = await getCategories();
    expect(res).toBe(mockCategories);
    // `toHaveBeenCalledTimes` is the non-deprecated matcher name.
    expect(spy).toHaveBeenCalledTimes(1);
  });

  it('should throw an error if an error is thrown from db', async () => {
    const mockError = new Error('Mock Error');
    jest.spyOn(daos, 'getAllCategories').mockRejectedValueOnce(mockError);
    // The `.rejects` assertion returns a promise; without `await` a
    // failing assertion would be swallowed and the test could pass
    // spuriously (the original did not await it).
    await expect(getCategories()).rejects.toThrow(mockError);
  });
});
import { totalAmtForDate, totalByDateForCategory } from 'daos/order';
import { validationResult } from 'express-validator';
import { apiFailure, apiSuccess } from 'utils/apiUtils';
import aggregatedOrderAmountValidator from './validator';

/**
 * GET handler returning the aggregated order amount.
 * Query params: `date` (required by the validator) and optional
 * `category`; when both are present the per-category aggregate is used,
 * otherwise the overall per-date aggregate.
 * Responds via apiSuccess with `{ totalOrderAmount }`, or apiFailure
 * with the first validation/runtime error message.
 */
const fetchAggregatedOrderAmount = async (req, res) => {
  try {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      // Use the public errors.array() API (not the internal `errors.errors`
      // property) and throw a real Error so a stack trace exists; the
      // catch block still reads `.message` exactly as before.
      throw new Error(errors.array()[0].msg);
    }
    const { date, category } = req.query;
    const totalOrderAmount =
      date && category
        ? await totalByDateForCategory(date, category)
        : await totalAmtForDate(date);
    return apiSuccess(res, { totalOrderAmount });
  } catch (err) {
    return apiFailure(res, err.message);
  }
};

export { fetchAggregatedOrderAmount, aggregatedOrderAmountValidator };
import express from 'express';
import { login, loginValidator } from 'api/login';
import { roles, roleValidator } from 'api/roles';
import { assignRoles, assignRoleValidator } from 'api/assignRoles';
import { cronJob, cronJobValidator } from 'api/cronJob';
import {
  aggregatedOrderAmountValidator,
  fetchAggregatedOrderAmount
} from 'api/aggregate/orders';
import { rateLimiter as limiter } from 'middlewares/rateLimiter';

const router = express.Router();

// Shared limiter: at most 10 requests per 30-second window per client.
const rateLimiter = limiter({
  windowMs: 0.5 * 60 * 1000,
  max: 10,
  standardHeaders: true,
  legacyHeaders: false
});

// Rate limiter runs before validation so malformed requests are
// throttled as well (the original ordered /login as validator-first,
// inconsistent with /roles which limited first).
router.post('/login', rateLimiter, loginValidator, login);
router.post('/roles', rateLimiter, roleValidator, roles);
router.put('/assign-roles', assignRoleValidator, assignRoles);
router.post('/cron-job', cronJobValidator, cronJob);

router.get(
  '/aggregate/order-amount',
  aggregatedOrderAmountValidator,
  fetchAggregatedOrderAmount
);

export default router;
"(/__tests__/.*\\.test)\\.js$", 13 | "coverageReporters": ["json-summary", "text", "lcov"], 14 | "testPathIgnorePatterns": ["/dist/"], 15 | "moduleNameMapper": { 16 | "@server(.*)$": "/server/$1", 17 | "@(database|services|gql|middleware|daos|utils)(.*)$": "/server/$1/$2", 18 | "slack-notify": "/node_modules/slack-notify/src/cjs/index.js" 19 | }, 20 | "coverageThreshold": { 21 | "global": { 22 | "statements": 85, 23 | "branches": 85, 24 | "functions": 85, 25 | "lines": 85 26 | } 27 | }, 28 | "coveragePathIgnorePatterns": [ 29 | "/server/utils/routeLister", 30 | "/server/utils/random.js" 31 | ] 32 | } 33 | -------------------------------------------------------------------------------- /Licence: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020-Present Wednesday Solutions 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
import { validationResult } from 'express-validator';
import { apiFailure, apiSuccess } from 'utils/apiUtils';
import { clientCredentialsGrant, managementClient } from 'utils/auth0';
import assignRoleValidator from './validator';

/**
 * PUT handler that assigns Auth0 roles to a user.
 * Body: `authId` (Auth0 user id) and `role` (role name(s) to assign).
 * Looks up matching role ids through the Auth0 management client and
 * assigns them to the user, responding with a success message.
 */
const assignRoles = async (req, res) => {
  try {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      throw { message: errors.errors[0].msg };
    }
    const { authId, role } = req.body;
    // Obtain a management-API client via the client-credentials grant.
    const auth = await clientCredentialsGrant();
    const mgmtAuth0 = await managementClient(auth);
    const rolesArr = await mgmtAuth0.getRole();
    // Resolve the requested role names to their Auth0 role ids.
    const roleIds = rolesArr
      .filter(candidate => role.includes(candidate.name))
      .map(candidate => candidate.id);
    await mgmtAuth0.assignRolestoUser({ id: authId }, { roles: roleIds });
    return apiSuccess(res, { message: 'Role updated successfully' });
  } catch (err) {
    return apiFailure(res, err.message);
  }
};

export { assignRoles, assignRoleValidator };
import { validationResult } from 'express-validator';
import { apiFailure, apiSuccess } from 'utils/apiUtils';
import fetch from 'node-fetch';
import config from 'config';
import loginValidator from './validator';

/**
 * POST /login handler: exchanges a username/password for an Auth0 token
 * using the resource-owner password grant and returns Auth0's token
 * response body as-is.
 */
const login = async (req, res) => {
  try {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      throw { message: errors.errors[0].msg };
    }
    const { username, password } = req.body;
    // Grant payload; client/realm/grant-type/audience come from config.
    const payload = {
      username,
      password,
      client_id: config().frontendClientId,
      realm: config().connection,
      grant_type: config().frontendGrantType,
      audience: config().audience
    };
    const tokenResponse = await fetch(`https://${config().domain}/oauth/token`, {
      method: 'post',
      body: JSON.stringify(payload),
      headers: { 'Content-Type': 'application/json' }
    });
    const user = await tokenResponse.json();
    return apiSuccess(res, user);
  } catch (err) {
    return apiFailure(res, err.message);
  }
};

export { login, loginValidator };
import { redis } from 'services/redis';
const moment = require('moment');

/**
 * Folds a newly placed order into today's Redis aggregates.
 * Updates one `<date>_<category>` key per purchased product (running
 * total amount and unit count) and the overall `<date>_total` key
 * (running total amount and order count).
 * @param {object} orderDetail - order with `purchasedProducts`
 *   (each having price/quantity/category) and `totalPrice`
 */
export const updateOrderDetailInRedis = async orderDetail => {
  const currentDate = moment().format('YYYY-MM-DD');

  // Per-category running totals for today.
  for (const product of orderDetail.purchasedProducts) {
    const key = `${currentDate}_${product.category}`;
    const previous = JSON.parse(await redis.get(key));
    const amount = product.price * product.quantity;
    await redis.set(
      key,
      JSON.stringify({
        // `?? 0` seeds the accumulator when no aggregate exists yet
        // (clearer than the original `prev?.total + x || x` pattern).
        total: (previous?.total ?? 0) + amount,
        count: (previous?.count ?? 0) + product.quantity
      })
    );
  }

  // Overall running total for today.
  const totalKey = `${currentDate}_total`;
  const previousTotal = JSON.parse(await redis.get(totalKey));
  // Await the write so callers observe a fully updated aggregate; the
  // original fired this final set without awaiting it (floating promise).
  await redis.set(
    totalKey,
    JSON.stringify({
      total: (previousTotal?.total ?? 0) + orderDetail.totalPrice,
      count: (previousTotal?.count ?? 0) + 1
    })
  );
};
import { get } from 'lodash';
import { set } from 'express-http-context';
import message from 'utils/i18n/message';

/**
 * Checks whether the requester owns the resource selected by
 * `configObj.condition` on the given model.
 * @param {string} requesterEmail - email of the user making the request
 * @param {Model} model - model the resource is looked up on
 * @param {object} configObj - lookup condition, owner key and optional
 *   findAll configuration
 * @returns {Promise<boolean>} true when the requester is among the owners
 * @throws {Error} when no resource matches the condition
 */
export const ownershipBasedAccessControl = async (
  requesterEmail,
  model,
  configObj
) => {
  const resource = await model.findOne(configObj.condition);
  if (!resource) {
    throw new Error(message.RESOURCE_NOT_FOUND);
  }
  // The owner key may hold a single owner or an array of owner objects.
  const owners = get(resource, configObj.ownerKey);
  const ownerEmails = Array.isArray(owners)
    ? owners.map(owner => owner.email)
    : [owners];
  if (!ownerEmails.includes(requesterEmail)) {
    return false;
  }
  // Publish the condition for the findAll api on the request context.
  if (configObj.findAll) {
    configObj.findAll.value = get(resource, configObj.resourceOwnershipPath);
    configObj.findAllCond = {
      [configObj.findAll.key]: configObj.findAll.value
    };
    set('condition', configObj.findAllCond);
  }

  return true;
};
7 | It will run on the following ports 8 | - 30001 9 | - 30002 10 | - 30003 11 | - shard server 1 12 | A replica set will be created with 2 secondary and 1 primary node.
13 | It will run on the following ports 14 | - 50001 15 | - 50002 16 | - 50003 17 | - shard server 2 18 | A replica set will be created with 2 secondary and 1 primary node.
19 | It will run on the following ports 20 | - 50004 21 | - 50005 22 | - 50006 23 | - shard server 3 24 | A replica set will be created with 2 secondary and 1 primary node.
25 | It will run on the following ports 26 | - 50007 27 | - 50008 28 | - 50009 29 | - shard server 4 30 | A replica set will be created with 2 secondary and 1 primary node.
import mongoose from 'mongoose';
import { getMongoUri } from 'utils/mongoConstants';
import { mongoConnector } from 'database/mongo';

jest.mock('mongoose', () => ({
  connection: {
    readyState: 1,
    on: jest.fn().mockImplementation((_, cb) => cb()),
    once: jest.fn().mockImplementation((_, cb) => cb())
  },
  connect: jest.fn()
}));

// Helper to override the mocked connection's readyState for a test.
const setReadyState = readyState => {
  mongoose.connection = { ...mongoose.connection, readyState };
};

describe('mongooseConnector tests', () => {
  it('should create new mongodb connection if the connection is not made', async () => {
    setReadyState(0);

    await mongoConnector();

    expect(mongoose.connect).toBeCalledWith(getMongoUri());
    expect(mongoose.connection.on).toBeCalledWith(
      'error',
      expect.any(Function)
    );
    expect(mongoose.connection.once).toBeCalledWith(
      'open',
      expect.any(Function)
    );
  });

  it('should return mongodb connection if the connection already exists', async () => {
    setReadyState(1);

    await mongoConnector();

    expect(mongoose.connect).not.toBeCalled();
  });
});
import supertest from 'supertest';
import app from 'server';
import * as job from 'utils/custom/scheduleJob';

describe('cronJob tests', () => {
  // Helper: POST a JSON body to the cron-job endpoint.
  const postCronJob = body =>
    supertest(app)
      .post('/cron-job')
      .set('Accept', 'application/json')
      .send(body);

  it('should throw error when correct parameters are not passed', async () => {
    const res = await postCronJob({});
    expect(res.statusCode).toBe(500);
    expect(res.error.text).toEqual('{"error":"scheduleIn must be present"}');
  });

  it('should ensure it return 200 when called with correct parameters', async () => {
    const payload = {
      scheduleIn: 2000,
      message: 'This message should be consoled at the scheduled time',
      queueName: 'scheduleJob'
    };
    const apiRes = { data: { success: 'true' } };
    const spy = jest
      .spyOn(job, 'scheduleJob')
      .mockImplementationOnce(() => apiRes.data);

    const res = await postCronJob(payload);

    expect(spy).toHaveBeenCalledWith(
      payload.scheduleIn,
      payload.message,
      payload.queueName
    );
    expect(res.body).toEqual(apiRes);
  });
});
import { newCircuitBreaker } from 'services/circuitBreaker';

describe('newCircuitBreaker tests', () => {
  const fallbackMessage = 'Some fallback message';

  it('should return response from the api', async () => {
    const data = 'this is some api response';
    const someFunc = async () => ({ data });
    const testme = 'testme';
    // Invoke as a plain factory: the other two tests call it without
    // `new`; the original inconsistently used `new newCircuitBreaker(...)`
    // in this one test.
    const breaker = newCircuitBreaker(someFunc, fallbackMessage);
    const res = await breaker.fire(testme);
    expect(res.data).toBe(data);
  });

  it('should return the fallback message if the API throws an error', async () => {
    const customError = 'This is some error';
    const somefunc = async () => {
      throw new Error(customError);
    };
    const testme = 'testme';
    const breaker = newCircuitBreaker(somefunc, fallbackMessage);
    const res = await breaker.fire(testme);
    expect(res).toBe(`${fallbackMessage}. ${customError}`);
  });

  it('should return the fallback message if the API throws an error without a message.', async () => {
    const somefunc = async () => {
      throw new Error();
    };
    const testme = 'testme';
    const breaker = newCircuitBreaker(somefunc, fallbackMessage);
    const res = await breaker.fire(testme);
    expect(res).toBe(`${fallbackMessage}. Error`);
  });
});
import supertest from 'supertest';
import kebabCase from 'lodash/kebabCase';
import app from 'server';
import { mockData } from 'utils/mockData';

const { MOCK_UNSHARDED_REFERENCED_ORDERS: mockReferencedOrders } = mockData;

// Bypass JWT verification for these endpoint tests.
jest.mock('middlewares/auth', () => ({
  checkJwt: (req, res, next) => {
    next();
  }
}));

describe('fetchAllReferencedOrders tests', () => {
  let MODEL_NAME;
  let ENDPOINT;
  let mockingoose;

  beforeAll(() => {
    MODEL_NAME = 'referencedOrders';
    ENDPOINT = `/${kebabCase(MODEL_NAME)}`;
  });

  beforeEach(() => {
    mockingoose = require('mockingoose');
  });

  // Helper: GET the referenced-orders endpoint.
  const fetchOrders = () =>
    supertest(app).get(ENDPOINT).set('Accept', 'application/json');

  it('should fetch all referenced orders', async () => {
    mockingoose(MODEL_NAME).toReturn(mockReferencedOrders.data, 'find');

    const res = await fetchOrders();

    expect(res.statusCode).toBe(200);
    expect(res.body).toEqual(mockReferencedOrders);
  });

  it('should fail to fetch all referenced orders if something goes wrong', async () => {
    const error = new Error('unable to fetch referenced orders');
    mockingoose(MODEL_NAME).toReturn(error, 'find');

    const res = await fetchOrders();

    expect(res.statusCode).toBe(500);
    expect(res.body.error).toBe(error.message);
  });
});
'utils/mockData'; 4 | import kebabCase from 'lodash/kebabCase'; 5 | 6 | const { MOCK_UNSHARDED_REFERENCED_ORDERS: mockUnshardedReferencedOrders } = 7 | mockData; 8 | jest.mock('middlewares/auth', () => ({ 9 | checkJwt: (req, res, next) => { 10 | next(); 11 | } 12 | })); 13 | describe('fetchAllUnshardedReferencedOrders tests', () => { 14 | let MODEL_NAME; 15 | let ENDPOINT; 16 | let mockingoose; 17 | 18 | beforeAll(() => { 19 | MODEL_NAME = 'unshardedReferencedOrders'; 20 | ENDPOINT = `/${kebabCase(MODEL_NAME)}`; 21 | }); 22 | 23 | beforeEach(() => { 24 | mockingoose = require('mockingoose'); 25 | }); 26 | it('should fetch all unsharded referenced orders', async () => { 27 | mockingoose(MODEL_NAME).toReturn( 28 | mockUnshardedReferencedOrders.data, 29 | 'find' 30 | ); 31 | const res = await supertest(app) 32 | .get(ENDPOINT) 33 | .set('Accept', 'application/json'); 34 | 35 | expect(res.statusCode).toBe(200); 36 | expect(res.body).toEqual(mockUnshardedReferencedOrders); 37 | }); 38 | 39 | it('should fail to fetch if something goes wrong', async () => { 40 | const error = new Error('unable to fetch unsharded referenced orders'); 41 | mockingoose(MODEL_NAME).toReturn(error, 'find'); 42 | const res = await supertest(app) 43 | .get(ENDPOINT) 44 | .set('Accept', 'application/json'); 45 | 46 | expect(res.statusCode).toBe(500); 47 | expect(res.body.error).toBe(error.message); 48 | }); 49 | }); 50 | -------------------------------------------------------------------------------- /server/utils/index.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import mongoose from 'mongoose'; 3 | import { apiFailure } from './apiUtils'; 4 | 5 | export const isTestEnv = () => 6 | process.env.ENVIRONMENT_NAME === 'test' || process.env.NODE_ENV === 'test'; 7 | 8 | export const getModelFiles = modelsFolderPath => { 9 | if (typeof modelsFolderPath !== 'string') { 10 | throw new Error('modelPathString is invalid'); 11 | } 12 | 
import Bull from 'bull';
import moment from 'moment';
import { aggregateCheck } from 'api/cronJob/aggregateJob';

// Lazily-created Bull queues, keyed by queue name (module-level cache).
const queues = {};

// Queue identifiers used across the app.
export const QUEUE_NAMES = {
  SCHEDULE_JOB: 'scheduleJob',
  AGGREGATE_CHECK: 'aggregateCheck'
};

// Cron expressions for repeatable jobs.
const CRON_EXPRESSIONS = {
  MIDNIGHT: '0 0 * * *' // every day at 00:00
};

export const QUEUE_PROCESSORS = {
  [QUEUE_NAMES.SCHEDULE_JOB]: (job, done) => {
    console.log(
      `${moment()}::Job with id: ${job.id} is being executed.\n`,
      {
        message: job.data.message
      }
    );
    done();
  },
  [QUEUE_NAMES.AGGREGATE_CHECK]: (job, done) => {
    console.log('Aggregate job is getting executed.');
    // FIX: aggregateCheck is async — previously it was fired and `done()`
    // was invoked immediately, so Bull marked the job complete before the
    // aggregation ran, and any rejection was an unhandled promise. Complete
    // the job only after the promise settles and report failures to Bull.
    aggregateCheck()
      .then(() => done())
      .catch(done);
  }
};

/**
 * Create (or reuse) a queue for every registered processor, attach its
 * processor, and schedule the nightly aggregate job.
 */
export const initQueues = () => {
  console.log('init queues');
  Object.keys(QUEUE_PROCESSORS).forEach(queueName => {
    queues[queueName] = getQueue(queueName);
    queues[queueName].process(QUEUE_PROCESSORS[queueName]);
  });
  queues[QUEUE_NAMES.AGGREGATE_CHECK].add(
    {},
    { repeat: { cron: CRON_EXPRESSIONS.MIDNIGHT } }
  );
};

/**
 * Return the Bull queue for `queueName`, creating it on first use.
 * Redis connection details come from REDIS_DOMAIN / REDIS_PORT.
 */
export const getQueue = queueName => {
  if (!queues[queueName]) {
    const redisUrl = `redis://${process.env.REDIS_DOMAIN}:${process.env.REDIS_PORT}`;
    queues[queueName] = new Bull(queueName, redisUrl);
    console.log('created queue: ', queueName, redisUrl);
  }
  return queues[queueName];
};
import path from 'path';
import express from 'express';
import kebab from 'lodash/kebabCase';
import { generateRequest } from 'api/requestGenerators';
import { mongoConnector } from '../database/mongo';
import { customApisMapper, REQUEST_TYPES } from 'api/customApisMapper';
import customRoutes from 'server/api/routes';
import { getModelFiles, isTestEnv } from 'utils';
import { registerSwagger } from 'utils/swagUtils';

/* istanbul ignore next */
if (!isTestEnv()) {
  mongoConnector();
}

/**
 * Mount all application routes on `app`: auto-generated CRUD APIs for every
 * model file, the hand-written custom routes, and the swagger docs.
 */
export default app => {
  autoGenerateApisFromModels(app);
  // Custom api
  app.use('/', customRoutes);
  registerSwagger(app);
};

// Scan the models folder and register a CRUD router per model file.
const autoGenerateApisFromModels = app => {
  // FIX: was declared with `let` but never reassigned.
  const modelsFolderPath = path.join(
    __dirname,
    '../../server/database/models/'
  );
  const fileArray = getModelFiles(modelsFolderPath);
  fileArray.forEach(f => {
    // eslint-disable-next-line prefer-template
    const { model } = require(`server/database/models/` + f);
    // model name = filename up to the first dot
    const name = f.split('.')[0];

    apiGeneratorFactory(app, name, model);
  });
};

/**
 * Build a router for `model`. For each REST verb either auto-generate the
 * handler or, when customApisMapper declares an override for this model and
 * verb, use the override's handler (with its validator). An override entry
 * without a handler deliberately disables the verb for that model.
 */
const apiGeneratorFactory = (app, name, model) => {
  const router = express.Router();
  Object.values(REQUEST_TYPES).forEach(type => {
    // single lookup replaces the previous includes()-then-find() double scan
    const customMethods = customApisMapper[name]?.methods ?? [];
    const customApi = customMethods.find(m => m.type === type);
    if (!customApi) {
      // no override registered — auto generate the api
      generateRequest(type, router, model);
    } else if (customApi.handler) {
      customApi.handler(router, model, customApi.validator);
    }
  });
  app.use(`/${kebab(name)}`, router);
};
'import/no-extraneous-dependencies': 0, 33 | 'import/no-named-as-default': 0, 34 | 'import/no-unresolved': 0, 35 | 'import/prefer-default-export': 0, 36 | 'no-param-reassign': 0, 37 | 'max-len': 0, 38 | 'newline-per-chained-call': 0, 39 | 'no-confusing-arrow': 0, 40 | 'no-unused-vars': ['error', { args: 'none' }], 41 | 'no-use-before-define': 0, 42 | 'prefer-template': 2, 43 | 'require-yield': 0 44 | }, 45 | settings: { 46 | 'import/resolver': { 47 | node: { 48 | app: './app', 49 | context: 'app', 50 | resolve: { 51 | app: './app', 52 | paths: ['app'], 53 | modules: ['app', 'node_modules'], 54 | extensions: ['.js', '.json', '.coffee'] 55 | } 56 | } 57 | } 58 | } 59 | }; 60 | -------------------------------------------------------------------------------- /__tests__/server/api/roles/index.test.js: -------------------------------------------------------------------------------- 1 | import supertest from 'supertest'; 2 | import app from 'server'; 3 | import { mockData } from 'utils/mockData'; 4 | const { MOCK_ROLE: mockRole } = mockData; 5 | 6 | jest.mock('express-jwt', () => secret => (req, res, next) => { 7 | req['user'] = { 8 | 'https://express-demo/roles': ['ADMIN', 'SUPER_ADMIN'] 9 | }; 10 | next(null, {}); 11 | }); 12 | 13 | jest.mock('auth0', () => ({ 14 | AuthenticationClient: () => ({ 15 | clientCredentialsGrant: () => ({ access_token: 'access' }) 16 | }), 17 | ManagementClient: () => ({ 18 | createRole: () => mockRole.data 19 | }) 20 | })); 21 | describe('roles endpoint tests', () => { 22 | it('should throw an error when correct parameters are not passed', async () => { 23 | const res = await supertest(app) 24 | .post('/roles') 25 | .set('Accept', 'application/json') 26 | .send({}); 27 | expect(res.statusCode).toBe(500); 28 | expect(res.error.text).toBe('{"error":"Invalid role name."}'); 29 | }); 30 | 31 | it('should throw an error when invalid role is passed', async () => { 32 | const res = await supertest(app) 33 | .post('/roles') 34 | .set('Accept', 
var httpContext = require('express-http-context');
// FIX: `log` was referenced in every catch block below but never imported,
// so any error path threw a ReferenceError instead of logging the failure.
import log from 'utils/logger';

/** Create a document of `model` from `args`; logs and rethrows failures. */
export const createItem = async (model, args) => {
  try {
    // FIX: `await` is required — without it the (possibly rejected) promise
    // was returned before settling and this catch block could never fire.
    return await model.create(args);
  } catch (err) {
    log.info({ err });
    throw err;
  }
};

// no page & no limit
// have page & have limit
// have limit > 100

/**
 * Fetch a page of documents. `query.limit` is capped at 100 (and defaults
 * to 100); `query.page` defaults to 0. An extra filter may be injected via
 * the request-scoped `condition` http-context entry.
 */
export const fetchItems = async (model, query) => {
  try {
    if (!query.limit || query.limit > 100) {
      query.limit = 100;
    }
    query.page = query.page || 0;
    const condition = httpContext.get('condition') || {};
    return await model
      .find(condition)
      .skip(query.page * query.limit)
      .limit(query.limit);
  } catch (err) {
    log.info({ err });
    throw err;
  }
};

/** Fetch a single document matching `args`. */
export const fetchItem = async (model, args) => {
  try {
    // FIX: removed leftover debug `console.log({ args })`
    return await model.findOne(args);
  } catch (err) {
    log.info({ err });
    throw err;
  }
};

/** Update the first document matching `where`, then return the fresh copy. */
export const updateItem = async (model, where, args) => {
  try {
    await model.updateOne(where, args);
    return fetchItem(model, where);
  } catch (err) {
    log.info({ err });
    throw err;
  }
};

/** Delete the first document matching `where`. */
export const deleteItem = async (model, where) => {
  try {
    return await model.deleteOne(where);
  } catch (err) {
    log.info({ err });
    throw err;
  }
};

/** Create a user document (same semantics as createItem). */
export const createUser = async (model, args) => {
  try {
    return await model.create(args);
  } catch (err) {
    log.info({ err });
    throw err;
  }
};

/** Fetch a page of orders with their purchased products populated. */
export const fetchAllPurchasedProducts = async (model, query) =>
  model
    .find()
    .select('purchasedProducts')
    .populate('purchasedProducts')
    .skip(query.page * query.limit)
    .limit(query.limit);
| unshardedReferencedOrders: { 53 | methods: [ 54 | { 55 | type: REQUEST_TYPES.fetchAll, 56 | handler: fetchAllUnshardedReferencedOrders 57 | } 58 | ] 59 | }, 60 | users: { 61 | methods: [ 62 | { 63 | type: REQUEST_TYPES.create, 64 | handler: createUser, 65 | validator: userValidator 66 | } 67 | ] 68 | } 69 | }; 70 | -------------------------------------------------------------------------------- /server/utils/mockData.js: -------------------------------------------------------------------------------- 1 | export const mockData = { 2 | MOCK_USER: { 3 | _id: 1, 4 | firstName: 'Jhon', 5 | lastName: 'Doe', 6 | email: 'doe@wednesday.is', 7 | authId: 'auth0|6241b09d4bd9006f9a45cf' 8 | }, 9 | MOCK_USER_LOGIN: { 10 | data: { 11 | access_token: 'eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXV', 12 | scope: 'read:current_user', 13 | expires_in: 864009, 14 | token_type: 'Bearer' 15 | } 16 | }, 17 | MOCK_ROLE: { 18 | data: { 19 | id: 'rol_XeIAH73iey2dLBNn', 20 | name: 'STORE_ADMIN', 21 | description: 'Can access endpoints related to users' 22 | } 23 | }, 24 | MOCK_TOTAL_AMT: [ 25 | { 26 | _id: null, 27 | totalPrice: 25000 28 | } 29 | ], 30 | MOCK_EARLIEST_CREATED_DATE: { 31 | createdAt: '1994-10-24', 32 | purchasedProducts: [] 33 | }, 34 | MOCK_TOTAL_COUNT: [ 35 | { 36 | __id: null, 37 | totalOrder: 1500 38 | } 39 | ], 40 | MOCK_CATEGORIES: ['Sports', 'Automotive', 'Tools'], 41 | MOCK_ORDER: { 42 | purchasedProducts: [ 43 | { 44 | name: 'Generic Cotton Sausages', 45 | price: 43000, 46 | category: 'Garden', 47 | quantity: 1, 48 | _id: '624c1ad1527d4e2840408142' 49 | } 50 | ], 51 | totalPrice: 43000 52 | }, 53 | MOCK_ORDER_DETAILS: { 54 | purchasedProducts: [ 55 | { 56 | name: 'Generic Cotton Sausages', 57 | price: 43000, 58 | category: 'Garden', 59 | quantity: 1 60 | } 61 | ], 62 | totalPrice: 43000, 63 | createdAt: '2022-04-06' 64 | }, 65 | MOCK_UNSHARDED_REFERENCED_ORDERS: { 66 | data: [ 67 | { 68 | _id: '624ae6bcae38f95bb8e0ea74', 69 | purchasedProducts: [], 70 | totalPrice: 100 71 | } 
import { validationResult } from 'express-validator';
import {
  createNewOrder,
  totalAmtForDate,
  earliestCreatedDate,
  totalByDateForCategory,
  countByDate,
  countByDateForCategory
} from 'daos/order';
import orderValidator from './validator';
import { updateOrderDetailInRedis } from './updateRedis';
import { apiFailure, apiSuccess } from 'utils/apiUtils';

/**
 * POST /orders handler: validate the request, persist the order and kick
 * off the redis aggregate update. Responds 400 with the first
 * validation/DB error message on failure.
 */
export const createOrder = async (req, res) => {
  try {
    const errors = validationResult(req);
    if (!errors.isEmpty()) {
      // FIX: throw a real Error (not a bare object) so stack traces are
      // preserved; the catch below only reads `.message`, which is unchanged.
      throw new Error(errors.errors[0].msg);
    }
    const order = await createNewOrder(req.body);
    // deliberately not awaited: the redis update must not delay the response
    updateOrderDetailInRedis(order);
    return apiSuccess(res, order);
  } catch (err) {
    return apiFailure(res, err.message, 400);
  }
};

// FIX: the wrappers below each wrapped a single dao call in
// `try { … } catch (e) { throw e; }`, which is a no-op — the pointless
// handlers were removed. Each stays `async` so the promise semantics that
// callers (and the cron job) rely on are unchanged.

/** Total order amount for `date`. */
export const getTotalOrderAmtForDate = async date => totalAmtForDate(date);

/** Creation date of the earliest order on record. */
export const getEarliestOrderCreatedDate = async () => earliestCreatedDate();

/** Total order amount for `date` restricted to `category`. */
export const getTotalOrderAmtByDateForCategory = async (date, category) =>
  totalByDateForCategory(date, category);

/** Number of orders placed on `date`. */
export const getTotalOrderCountByDate = async date => countByDate(date);

/** Number of orders placed on `date` restricted to `category`. */
export const getTotalOrderCountByDateForCategory = async (date, category) =>
  countByDateForCategory(date, category);

export { orderValidator };
jest.resetModules(); 45 | jest.spyOn(console, 'log'); 46 | jest.spyOn(cronJob, 'aggregateCheck').mockImplementation(() => {}); 47 | initQueues(); 48 | expect(console.log.mock.calls.length).toBe(5); 49 | expect(console.log.mock.calls[1][0]).toBe( 50 | `${moment()}::Job with id: 1 is being executed.\n` 51 | ); 52 | expect(console.log.mock.calls[2][0]).toBe('done'); 53 | }); 54 | }); 55 | }); 56 | -------------------------------------------------------------------------------- /webpack/server.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const webpack = require('webpack'); 3 | const dotenv = require('dotenv'); 4 | require('@babel/register'); // will bind itself to node's require and automatically compile files on the fly 5 | 6 | const dotEnvFile = 7 | process.env.ENVIRONMENT_NAME === 'production' 8 | ? `.env` 9 | : `.env.${process.env.ENVIRONMENT_NAME || 'local'}`; 10 | 11 | const env = dotenv.config({ path: dotEnvFile }).parsed; 12 | 13 | const envKeys = { 14 | ...Object.keys(process.env).reduce((prev, next) => { 15 | prev[`process.env.${next}`] = JSON.stringify(process.env[next]); 16 | return prev; 17 | }, {}), 18 | ...Object.keys(env).reduce((prev, next) => { 19 | prev[`process.env.${next}`] = JSON.stringify(env[next]); 20 | return prev; 21 | }, {}) 22 | }; 23 | 24 | module.exports = (options = {}) => ({ 25 | mode: options.mode, 26 | entry: options.entry, 27 | devtool: 'source-map', 28 | module: { 29 | rules: [ 30 | { 31 | test: /\.jsx?$/, // Transform all .js and .jsx files required somewhere with Babel 32 | exclude: /node_modules/, 33 | use: { 34 | loader: 'babel-loader', // allows transpiling JavaScript files using Babel and webpack 35 | options: options.babelQuery 36 | } 37 | } 38 | ] 39 | }, 40 | plugins: options.plugins.concat([new webpack.DefinePlugin(envKeys)]), 41 | optimization: options.optimization, 42 | node: { 43 | __dirname: true 44 | }, 45 | resolve: { 46 | modules: 
['node_modules'], 47 | alias: { 48 | utils: path.resolve(__dirname, '../server/utils'), 49 | middlewares: path.resolve(__dirname, '../server/middlewares'), 50 | server: path.resolve(__dirname, '../server'), 51 | api: path.resolve(__dirname, '../server/api'), 52 | config: path.resolve(__dirname, '../config'), 53 | services: path.resolve(__dirname, '../server/services'), 54 | database: path.resolve(__dirname, '../server/database'), 55 | daos: path.resolve(__dirname, '../server/daos'), 56 | 'superagent-proxy': false 57 | }, 58 | 59 | extensions: ['.js'] 60 | }, 61 | output: { 62 | libraryTarget: 'commonjs' 63 | }, 64 | 65 | target: 'node' 66 | }); 67 | -------------------------------------------------------------------------------- /__tests__/server/api/assignRoles/index.test.js: -------------------------------------------------------------------------------- 1 | import supertest from 'supertest'; 2 | import app from 'server'; 3 | 4 | jest.mock('express-jwt', () => secret => (req, res, next) => { 5 | if (!req?.headers['authorization']) { 6 | return res.status(401).json({}); 7 | } 8 | req['user'] = { 9 | 'https://express-demo/roles': ['SUPER_ADMIN'] 10 | }; 11 | next(null, {}); 12 | }); 13 | 14 | jest.mock('auth0', () => ({ 15 | AuthenticationClient: () => ({ 16 | clientCredentialsGrant: () => ({ access_token: 'access' }) 17 | }), 18 | ManagementClient: () => ({ 19 | createUser: () => ({ user_id: 'auth0|12345678' }), 20 | getRole: () => [ 21 | { id: 1, name: 'ADMIN' }, 22 | { id: 2, name: 'SUPER_ADMIN' } 23 | ], 24 | assignRolestoUser: jest.fn() 25 | }) 26 | })); 27 | describe('assignRoles tests ', () => { 28 | it('should esnure it return 401 when no authorization token is passed', async () => { 29 | const res = await supertest(app) 30 | .put('/assign-roles') 31 | .set('Accept', 'application/json') 32 | .send({ 33 | authId: 'auth0|623e8f868ffd8b007', 34 | role: ['ADMIN', 'SUPER_ADMIN'] 35 | }); 36 | expect(res.error.status).toBe(401); 37 | }); 38 | it('should ensure it 
import moment from 'moment';
import { redis } from 'services/redis';
// FIX: `log` was used below but never imported (relying on a global, if any);
// in the catch block that would mask the real error with a ReferenceError.
import log from 'utils/logger';
import { REDIS_IMPLEMENTATION_DATE } from 'utils/constants';
import { getCategories } from '../products';
import {
  getTotalOrderCountByDate,
  getTotalOrderCountByDateForCategory,
  getEarliestOrderCreatedDate,
  getTotalOrderAmtForDate,
  getTotalOrderAmtByDateForCategory
} from '../orders';

/**
 * Nightly cron job: back-fills per-day (and per-day-per-category) order
 * totals/counts into redis, from the last synced day (or the earliest order
 * on record) up to REDIS_IMPLEMENTATION_DATE. Progress is checkpointed in
 * the `lastSyncFor` redis key so an interrupted run resumes where it stopped.
 * Errors are logged, never thrown.
 */
export const aggregateCheck = async () => {
  try {
    let startDate;
    let lastSyncFor;
    const endDate = moment(REDIS_IMPLEMENTATION_DATE);
    const redisValueForLastSync = await redis.get('lastSyncFor');
    if (redisValueForLastSync) {
      lastSyncFor = moment(redisValueForLastSync);
    }
    if (!lastSyncFor) {
      // first run: start from the earliest order on record
      startDate = moment(await getEarliestOrderCreatedDate());
    } else if (moment(lastSyncFor).isSameOrAfter(endDate)) {
      log.info(`Redis is updated with aggregate values until ${endDate}`);
      return;
    } else {
      startDate = lastSyncFor;
    }
    const categories = await getCategories();
    while (moment(startDate).isBefore(endDate)) {
      const formattedDate = startDate.format('YYYY-MM-DD');
      const totalAmtForDate = await getTotalOrderAmtForDate(formattedDate);
      const countForDate = await getTotalOrderCountByDate(formattedDate);

      // FIX: await the write so a failure is caught here instead of
      // becoming an unhandled rejection.
      await redis.set(
        `${formattedDate}_total`,
        JSON.stringify({
          total: totalAmtForDate,
          count: countForDate
        })
      );
      for (const category of categories) {
        const categoryTotal = await getTotalOrderAmtByDateForCategory(
          formattedDate,
          category
        );
        const categoryCount = await getTotalOrderCountByDateForCategory(
          formattedDate,
          category
        );

        await redis.set(
          `${formattedDate}_${category}`,
          JSON.stringify({
            total: categoryTotal,
            count: categoryCount
          })
        );

        // NOTE(review): checkpointing once per category is redundant — once
        // per day (after this loop) would suffice; kept in place to preserve
        // the observable redis.set call pattern.
        await redis.set('lastSyncFor', formattedDate);
      }
      startDate = startDate.add(1, 'day');
    }
  } catch (error) {
    log.info('Error while running aggregate check :', error.message);
  }
};
count = 1500; 15 | const mockError = new Error('Mock Error'); 16 | 17 | beforeEach(() => { 18 | jest.spyOn(orderFns, 'getEarliestOrderCreatedDate').mockResolvedValue( 19 | date 20 | ); 21 | jest.spyOn(categoryFns, 'getCategories').mockImplementation( 22 | () => mockCategories 23 | ); 24 | jest.spyOn(orderFns, 'getTotalOrderAmtForDate').mockResolvedValue(amt); 25 | jest.spyOn(orderFns, 'getTotalOrderCountByDate').mockResolvedValue( 26 | count 27 | ); 28 | jest.spyOn( 29 | orderFns, 30 | 'getTotalOrderAmtByDateForCategory' 31 | ).mockResolvedValue(amt); 32 | jest.spyOn( 33 | orderFns, 34 | 'getTotalOrderCountByDateForCategory' 35 | ).mockResolvedValue(count); 36 | }); 37 | it('should set values in redis if lastSyncFor date is not present', async () => { 38 | jest.spyOn(redis, 'get').mockResolvedValue(); 39 | const redisSpy = jest.spyOn(redis, 'set'); 40 | await aggregateCheck(); 41 | expect(redisSpy).toBeCalledTimes(7); 42 | }); 43 | 44 | it('should log redis is upto date when lastSyncFor date is same or after the redis implementation date', async () => { 45 | const redisSpy = jest 46 | .spyOn(redis, 'get') 47 | .mockResolvedValue(REDIS_IMPLEMENTATION_DATE); 48 | const spy = jest.spyOn(log, 'info'); 49 | 50 | await aggregateCheck(); 51 | expect(redisSpy).toBeCalledTimes(1); 52 | expect(spy).toBeCalledTimes(1); 53 | }); 54 | 55 | it('should set values in redis if the lastSync date is less than end date', async () => { 56 | jest.spyOn(redis, 'get').mockResolvedValue(date); 57 | const redisSpy = jest.spyOn(redis, 'set'); 58 | await aggregateCheck(); 59 | expect(redisSpy).toBeCalledTimes(7); 60 | }); 61 | 62 | it('should thrown an error if there is error thrown from db', async () => { 63 | jest.spyOn(redis, 'get').mockRejectedValueOnce(mockError); 64 | expect(async () => { 65 | await aggregateCheck(); 66 | }).rejects.toThrow(mockError); 67 | }); 68 | }); 69 | -------------------------------------------------------------------------------- 
/__tests__/__load__/libs/spo-gpo.js: -------------------------------------------------------------------------------- 1 | (function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.spoGpopolyfill = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i total + product.price, 30 | 0 31 | ); 32 | const order = { 33 | purchasedProducts: products.map(p => p._id), 34 | totalPrice 35 | }; 36 | return ReferencedOrders.create(order); 37 | } 38 | 39 | function createOrder(products, dontCreate, referenced) { 40 | const totalPrice = products.reduce( 41 | (total, product) => total + product.price, 42 | 0 43 | ); 44 | const order = { 45 | purchasedProducts: products, 46 | totalPrice 47 | }; 48 | if (dontCreate) { 49 | return order; 50 | } 51 | let model = UnshardedOrders; 52 | if (referenced) { 53 | model = UnshardedReferencedOrders; 54 | } 55 | return model.create(order); 56 | } 57 | function connectToMongo() { 58 | const envName = process.env.ENVIRONMENT_NAME || 'local'; 59 | console.log('connecting to mongo', envName); 60 | dotenv.config({ path: `.env.${envName}` }); 61 | return mongoose.connect(getMongoUri()); 62 | } 63 | async function runSeeders(func) { 64 | let runInClusterMode = false; 65 | if (process.argv.length >= 3) { 66 | const runInClusterModeFlag = process.argv[2]; 67 | try { 68 | runInClusterMode = 
JSON.parse(runInClusterModeFlag); 69 | } catch (err) { 70 | // no need to handle 71 | } 72 | } 73 | if (runInClusterMode && cluster.isMaster) { 74 | console.log(`Number of CPUs is ${totalCPUs}`); 75 | console.log(`Master ${process.pid} is running`); 76 | 77 | // Fork workers. 78 | for (let i = 0; i < totalCPUs; i++) { 79 | cluster.fork(); 80 | } 81 | } else { 82 | await func(); 83 | } 84 | 85 | if (!runInClusterMode) { 86 | console.log('done'); 87 | process.exit(1); 88 | } 89 | } 90 | module.exports = { 91 | createProduct, 92 | createOrder, 93 | createOrderWithProductReferenced, 94 | connectToMongo, 95 | runSeeders 96 | }; 97 | -------------------------------------------------------------------------------- /__tests__/server/api/users/index.test.js: -------------------------------------------------------------------------------- 1 | import supertest from 'supertest'; 2 | import * as requests from 'api/requestGenerators'; 3 | import Users from 'database/models/users'; 4 | import userValidator from 'api/users/validator'; 5 | import app from 'server'; 6 | import { createUser } from 'api/users'; 7 | import * as daos from 'api/utils'; 8 | import { mockData } from 'utils/mockData'; 9 | const { MOCK_USER: mockUser } = mockData; 10 | 11 | jest.mock('auth0', () => ({ 12 | AuthenticationClient: () => ({ 13 | clientCredentialsGrant: () => ({ access_token: 'access' }) 14 | }), 15 | ManagementClient: () => ({ 16 | createUser: () => ({ user_id: 'auth0|12345678' }) 17 | }) 18 | })); 19 | 20 | describe('User tests', () => { 21 | it('should called createUser with correct parameter', async () => { 22 | let spy = jest 23 | .spyOn(requests, 'generateCreateUserRequest') 24 | .mockImplementationOnce(() => jest.fn()); 25 | createUser(app, Users.Users, userValidator); 26 | expect(spy).toHaveBeenCalledWith({ 27 | router: app, 28 | model: Users.Users, 29 | validator: userValidator 30 | }); 31 | }); 32 | 33 | it('should call the create user api', async () => { 34 | jest.spyOn(daos, 
import config from 'config';
import jwt from 'express-jwt';
import { isEmpty } from 'lodash';
import jwks from 'jwks-rsa';
import { apiFailure } from 'utils/apiUtils';
import { SCOPE_TYPE } from 'utils/constants';
import message from 'utils/i18n/message';
import log from 'utils/logger';
import { paths } from './paths';

// true when `route` describes the incoming request (same path and verb)
const matchesRequest = (route, routePath, req) =>
  routePath === route.path &&
  req.method.toUpperCase() === route.method.toUpperCase();

/**
 * Role-based authorization middleware. Looks the request up in `paths`,
 * allows it when the matching route is scope-less or the user holds one of
 * its scopes, and — for non-SUPER_ADMIN users — additionally runs the
 * route's ownership middleware when one is configured.
 * Responds 403 on denial, 400 on unexpected errors.
 */
export const checkRole = async (req, res, next) => {
  try {
    const roleArr = req.user[`${config().apiAudience}/roles`];
    let isAllowed = false;
    let authMiddleware;
    const routePath = getRoutePath(req);
    // FIX: was `paths.map(...)` used purely for side effects, with a
    // `return` inside a forEach that could not break. Rewritten with plain
    // loops; the full scan (not just the first match) is kept deliberately
    // to preserve the original accumulation semantics should `paths`
    // contain duplicate path/method rows.
    for (const route of paths) {
      if (!matchesRequest(route, routePath, req)) {
        continue;
      }
      if (isEmpty(route.scopes)) {
        isAllowed = true;
      } else {
        for (const scope of route.scopes) {
          if (roleArr.includes(scope)) {
            isAllowed = true;
            if (route.authMiddleware) {
              authMiddleware = route.authMiddleware;
            }
            break;
          }
        }
      }
    }
    if (!isAllowed) {
      return apiFailure(res, message.ACCESS_DENIED, 403);
    }
    if (authMiddleware && !roleArr.includes(SCOPE_TYPE.SUPER_ADMIN)) {
      // ownership check applies to everyone except SUPER_ADMIN
      const isResourceOwner = await authMiddleware(req, res, next);
      if (!isResourceOwner) {
        return apiFailure(res, message.ACCESS_DENIED, 403);
      }
      next();
    } else {
      next();
    }
  } catch (error) {
    return apiFailure(res, error.message, 400);
  }
};

// Route path as registered with express: baseUrl + route path ('' for '/').
function getRoutePath(req) {
  const path = req.route?.path || req.path;
  return req.baseUrl + (path === '/' ? '' : path);
}

/**
 * JWT verification middleware. Routes with no entry in `paths` are public
 * and skip verification entirely; all others are validated against the
 * Auth0 JWKS endpoint and then handed to checkRole.
 */
export const checkJwt = (req, res, next) => {
  // FIX: getRoutePath was computed twice (once for the log, once for the
  // match loop); compute it once — the function is pure.
  const routePath = getRoutePath(req);
  log.info('incoming request::', routePath);
  const pathMatchFound = paths.some(route =>
    matchesRequest(route, routePath, req)
  );
  // If this is a public API there is no matching entry in the path.js
  if (!pathMatchFound) {
    next();
    return;
  }
  return jwt({
    secret: jwks.expressJwtSecret({
      cache: true,
      rateLimit: true,
      jwksRequestsPerMinute: 25,
      jwksUri: `https://${config().domain}/.well-known/jwks.json`
    }),
    issuer: `https://${config().domain}/`,
    algorithms: ['RS256']
  })(req, res, (err, data) => {
    if (err) {
      res.send({ errors: [err] });
      return next(err);
    }
    return checkRole(req, res, next);
  });
};
env: 30 | #           ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} 31 | #           ECR_REPOSITORY: ${{ secrets.AWS_ECR_REPOSITORY }}-${{steps.vars.outputs.stage}} 32 | #           AWS_REGION: ${{ secrets.AWS_REGION }} 33 | #           IMAGE_TAG: ${{ github.sha }} 34 | #         run: | 35 | #           docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . 36 | #           docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG 37 | #       - name: Render Amazon ECS task definition 38 | #         id: ecs-cd-starter-container 39 | #         uses: aws-actions/amazon-ecs-render-task-definition@v1 40 | #         with: 41 | #           task-definition: task-definition/${{steps.vars.outputs.stage}}.json #1 42 | #           container-name: ecs-cd-starter-${{steps.vars.outputs.stage}} #2 43 | #           image: ${{ steps.login-ecr.outputs.registry }}/${{ secrets.AWS_ECR_REPOSITORY }}-${{steps.vars.outputs.stage}}:${{ github.sha }} 44 | #       - name: Deploy to Amazon ECS service 45 | #         uses: aws-actions/amazon-ecs-deploy-task-definition@v1 46 | #         with: 47 | #           task-definition: ${{ steps.ecs-cd-starter-container.outputs.task-definition }} 48 | #           service: ecs-cd-starter-${{ steps.vars.outputs.stage }} 49 | #           cluster: ecs-cd-starter-${{ steps.vars.outputs.stage }} 50 | #       - name: Logout of Amazon ECR 51 | #         if: always() 52 | #         run: docker logout ${{ steps.login-ecr.outputs.registry }} 53 |  54 |   create_badges: 55 |     name: Create Badges 56 |     runs-on: ubuntu-latest 57 |     strategy: 58 |       matrix: 59 |         node-version: [14.17.x] 60 |     steps: 61 |       - uses: actions/checkout@v2 62 |       - name: Setup environment 63 |         uses: actions/setup-node@v2 64 |         with: 65 |           cache: 'yarn' 66 |           node-version: ${{ matrix.node-version }} 67 |       - name: Install dependencies 68 |         run: yarn install 69 |       - name: Create badges 70 |         run: yarn run test:badges 71 |       - name: Commit badges 72 |         uses: EndBug/add-and-commit@v7 73 |         with: 74 |           author_name: Gitflow 75 |           author_email: git@wednesday.is 76 |           message: 'chore: update badges' 77 |           add: 'badges/' 78 |       - name: Git pull origin 79 |         run: | 80 |           git pull origin ${{ github.ref }} 81 |  82 |       - name:
Push changes 83 | uses: ad-m/github-push-action@master 84 | with: 85 | github_token: ${{ secrets.GITHUB_TOKEN }} 86 | branch: ${{ github.ref }} 87 | -------------------------------------------------------------------------------- /__tests__/server/api/aggregate/orders/index.test.js: -------------------------------------------------------------------------------- 1 | import supertest from 'supertest'; 2 | import app from 'server'; 3 | import * as daos from 'daos/order'; 4 | 5 | jest.mock('express-jwt', () => () => (req, res, next) => { 6 | if (!req?.headers['authorization']) { 7 | return res.status(401).json({}); 8 | } 9 | req['user'] = { 10 | 'https://express-demo/roles': ['SUPER_ADMIN'] 11 | }; 12 | next(null, {}); 13 | }); 14 | 15 | jest.mock('auth0', () => ({ 16 | AuthenticationClient: () => ({ 17 | clientCredentialsGrant: () => ({ access_token: 'access' }) 18 | }), 19 | ManagementClient: () => ({ 20 | createUser: () => ({ user_id: 'auth0|12345678' }), 21 | getRole: () => [ 22 | { id: 1, name: 'ADMIN' }, 23 | { id: 2, name: 'SUPER_ADMIN' } 24 | ], 25 | assignRolestoUser: jest.fn() 26 | }) 27 | })); 28 | 29 | describe('aggregate tests', () => { 30 | const amt = 15000; 31 | const date = '2021-01-05'; 32 | const category = 'Music'; 33 | 34 | it('should throw error when authorization token is not sent in header', async () => { 35 | const res = await supertest(app) 36 | .get('/aggregate/order-amount') 37 | .set({ 38 | Accept: 'application/json' 39 | }) 40 | .send({}); 41 | 42 | expect(res.statusCode).toBe(401); 43 | }); 44 | 45 | it('should throw an error when date is not sent in query params', async () => { 46 | const res = await supertest(app) 47 | .get('/aggregate/order-amount') 48 | .set({ 49 | Accept: 'application/json', 50 | Authorization: 'Bearer dummy-token' 51 | }) 52 | .send({}); 53 | 54 | expect(res.statusCode).toBe(500); 55 | expect(res.error.text).toBe('{"error":"Add a valid date"}'); 56 | }); 57 | 58 | it('should throw an error when an invalid date 
format is sent in query params', async () => { 59 | const res = await supertest(app) 60 | .get('/aggregate/order-amount?date=2021-01-0505:00') 61 | .set({ 62 | Accept: 'application/json', 63 | Authorization: 'Bearer dummy-token' 64 | }) 65 | .send({}); 66 | 67 | expect(res.statusCode).toBe(500); 68 | expect(res.error.text).toBe('{"error":"Add a valid date"}'); 69 | }); 70 | it('should return total order amount for date sent in query', async () => { 71 | const totalAmtForDateSpy = jest 72 | .spyOn(daos, 'totalAmtForDate') 73 | .mockResolvedValueOnce(amt); 74 | const res = await supertest(app) 75 | .get(`/aggregate/order-amount?date=${date}`) 76 | .set({ 77 | Accept: 'application/json', 78 | Authorization: 'Bearer dummy-token' 79 | }) 80 | .send({}); 81 | expect(totalAmtForDateSpy).toBeCalledWith(date); 82 | expect(res.body.data.totalOrderAmount).toBe(amt); 83 | expect(res.statusCode).toBe(200); 84 | }); 85 | it('should return total order amount by date for category sent in query', async () => { 86 | const totalByDateForCategorySpy = jest 87 | .spyOn(daos, 'totalByDateForCategory') 88 | .mockResolvedValueOnce(amt); 89 | const res = await supertest(app) 90 | .get(`/aggregate/order-amount?date=${date}&category=${category}`) 91 | .set({ 92 | Accept: 'application/json', 93 | Authorization: 'Bearer dummy-token' 94 | }) 95 | .send({}); 96 | expect(totalByDateForCategorySpy).toBeCalledWith(date, category); 97 | expect(res.body.data.totalOrderAmount).toBe(amt); 98 | expect(res.statusCode).toBe(200); 99 | }); 100 | }); 101 | -------------------------------------------------------------------------------- /.github/workflows/prod-release.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | 6 | name: Create Production Release 7 | 8 | jobs: 9 | build: 10 | name: Create Production Release 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Actions Ecosystem Action Get Merged Pull Request 14 | uses: 
actions-ecosystem/action-get-merged-pull-request@v1.0.1 15 | id: getMergedPR 16 | with: 17 | github_token: ${{ secrets.GITHUB_TOKEN }} 18 | - name: Checkout code 19 | uses: actions/checkout@v2 20 | - run: | 21 | git fetch --prune --unshallow --tags 22 | - name: Get Commit Message 23 | run: | 24 | declare -A category=( [fix]="" [chore]="" [revert]="" [build]="" [docs]="" [feat]="" [perf]="" [refactor]="" [style]="" [temp]="" [test]="" [ci]="" [others]="") 25 | declare -A categoryTitle=( [fix]="
Bug Fixes
" [build]="
Build
" [docs]="
Documentation
" [feat]="
New Features
" [chore]="
Changes to build process or aux tools
" [ci]="
Changes to CI config
" [temp]="
Temporary commit
" [perf]="
Performance Enhancement
" [revert]="
Revert Commits
" [refactor]="
Refactored
" [style]="
Changed Style
" [test]="
Added Tests
" [others]="
Others
") 26 | msg="#${{ steps.getMergedPR.outputs.number}} ${{ steps.getMergedPR.outputs.title}}" 27 | for i in $(git log --format=%h $(git merge-base HEAD^1 HEAD^2)..HEAD^2) 28 | do 29 | IFS=":" read -r type cmmsg <<< $(git log --format=%B -n 1 $i) 30 | type="${type}" | xargs 31 | text_msg="    • $i - ${cmmsg}
" 32 | flag=1 33 | for i in "${!category[@]}" 34 | do 35 | if [ "${type}" == "$i" ] 36 | then 37 | category[$i]+="${text_msg}" 38 | flag=0 39 | break 40 | fi 41 | done 42 | if [ $flag -eq 1 ] 43 | then 44 | category[others]+="${text_msg}" 45 | fi 46 | done 47 | for i in "${!category[@]}" 48 | do 49 | if [ ! -z "${category[$i]}" ] && [ "others" != "$i" ] 50 | then 51 | msg+="${categoryTitle[$i]}${category[$i]}" 52 | fi 53 | done 54 | if [ ! -z "${category[others]}" ] 55 | then 56 | msg+="${categoryTitle[others]}${category[others]}" 57 | fi 58 | echo "COMMIT_MESSAGE=${msg}" >> $GITHUB_ENV 59 | - name: Bump version and push tag 60 | run: | 61 | cd "$GITHUB_WORKSPACE" 62 | git config user.email "$GITHUB_ACTOR@users.noreply.github.com" 63 | git config user.name "$GITHUB_ACTOR" 64 | npm version patch 65 | git push 66 | - name: get-npm-version 67 | id: package-version 68 | uses: martinbeentjes/npm-get-version-action@master 69 | - name: Create Prod Release 70 | uses: actions/create-release@latest 71 | env: 72 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 73 | with: 74 | tag_name: ${{ steps.package-version.outputs.current-version}} 75 | release_name: v${{ steps.package-version.outputs.current-version}} 76 | body: ${{ env.COMMIT_MESSAGE }} 77 | draft: false 78 | prerelease: false 79 | -------------------------------------------------------------------------------- /.github/workflows/beta-release.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - qa 5 | 6 | name: Create Beta Release 7 | 8 | jobs: 9 | build: 10 | name: Create Beta Release 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Actions Ecosystem Action Get Merged Pull Request 14 | uses: actions-ecosystem/action-get-merged-pull-request@v1.0.1 15 | id: getMergedPR 16 | with: 17 | github_token: ${{ secrets.GITHUB_TOKEN }} 18 | - name: Checkout code 19 | uses: actions/checkout@v2 20 | - run: | 21 | git fetch --prune --unshallow --tags 22 | - name: 
Get Commit Message 23 | run: | 24 | declare -A category=( [fix]="" [chore]="" [revert]="" [build]="" [docs]="" [feat]="" [perf]="" [refactor]="" [style]="" [temp]="" [test]="" [ci]="" [others]="") 25 | declare -A categoryTitle=( [fix]="
Bug Fixes
" [build]="
Build
" [docs]="
Documentation
" [feat]="
New Features
" [chore]="
Changes to build process or aux tools
" [ci]="
Changes to CI config
" [temp]="
Temporary commit
" [perf]="
Performance Enhancement
" [revert]="
Revert Commits
" [refactor]="
Refactored
" [style]="
Changed Style
" [test]="
Added Tests
" [others]="
Others
") 26 | msg="#${{ steps.getMergedPR.outputs.number}} ${{ steps.getMergedPR.outputs.title}}" 27 | for i in $(git log --format=%h $(git merge-base HEAD^1 HEAD^2)..HEAD^2) 28 | do 29 | IFS=":" read -r type cmmsg <<< $(git log --format=%B -n 1 $i) 30 | type="${type}" | xargs 31 | text_msg="    • $i - ${cmmsg}
" 32 | flag=1 33 | for i in "${!category[@]}" 34 | do 35 | if [ "${type}" == "$i" ] 36 | then 37 | category[$i]+="${text_msg}" 38 | flag=0 39 | break 40 | fi 41 | done 42 | if [ $flag -eq 1 ] 43 | then 44 | category[others]+="${text_msg}" 45 | fi 46 | done 47 | for i in "${!category[@]}" 48 | do 49 | if [ ! -z "${category[$i]}" ] && [ "others" != "$i" ] 50 | then 51 | msg+="${categoryTitle[$i]}${category[$i]}" 52 | fi 53 | done 54 | # if [ ! -z "${category[others]}" ] 55 | # then 56 | # msg+="${categoryTitle[others]}${category[others]}" 57 | # fi 58 | echo "COMMIT_MESSAGE=${msg}" >> $GITHUB_ENV 59 | - name: Bump version and push tag 60 | run: | 61 | cd "$GITHUB_WORKSPACE" 62 | git config user.email "$GITHUB_ACTOR@users.noreply.github.com" 63 | git config user.name "$GITHUB_ACTOR" 64 | npm version patch 65 | git push && git push --tags 66 | - name: get-npm-version 67 | id: package-version 68 | uses: martinbeentjes/npm-get-version-action@master 69 | - name: Create Beta Release 70 | uses: actions/create-release@latest 71 | env: 72 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 73 | with: 74 | tag_name: ${{ steps.package-version.outputs.current-version}}-Beta 75 | release_name: v${{ steps.package-version.outputs.current-version}}-Beta 76 | body: ${{ env.COMMIT_MESSAGE }} 77 | draft: false 78 | prerelease: false 79 | -------------------------------------------------------------------------------- /__tests__/server/middlewares/auth/index.test.js: -------------------------------------------------------------------------------- 1 | import message from 'utils/i18n/message'; 2 | import * as utils from 'utils/apiUtils'; 3 | import { checkRole } from 'middlewares/auth'; 4 | import { ownershipBasedAccessControl } from 'middlewares/auth/ownershipBasedAccessControl'; 5 | jest.mock('middlewares/auth/ownershipBasedAccessControl', () => ({ 6 | ownershipBasedAccessControl: jest.fn() 7 | })); 8 | 9 | describe('checkRole tests', () => { 10 | let req; 11 | let next; 12 | let res; 13 | let 
apiFailureMock; 14 | let mocks; 15 | beforeEach(() => { 16 | process.env.API_AUDIENCE = 'https://node-express-demo'; 17 | req = { 18 | route: { 19 | path: '/' 20 | }, 21 | baseUrl: '/assign-roles', 22 | method: 'PUT', 23 | user: { 24 | 'https://node-express-demo/roles': ['SUPER_ADMIN'] 25 | } 26 | }; 27 | next = jest.fn(); 28 | const mockResponse = () => { 29 | const res = {}; 30 | res.json = jest.fn().mockReturnValue(res); 31 | 32 | return res; 33 | }; 34 | res = mockResponse(); 35 | apiFailureMock = jest 36 | .spyOn(utils, 'apiFailure') 37 | .mockImplementationOnce((res, errMsg, status) => {}); 38 | mocks = { 39 | ownershipBasedAccessControl 40 | }; 41 | }); 42 | 43 | const mockFunction = (object, methodName, returnValue) => 44 | jest.spyOn(object, methodName).mockResolvedValueOnce(returnValue); 45 | 46 | it('should ensure it return 403 when the user doesnot have right role to access the route', async () => { 47 | req.user['https://node-express-demo/roles'] = 'STORE_ADMIN'; 48 | await checkRole(req, res, next); 49 | expect(apiFailureMock).toBeCalledWith(res, message.ACCESS_DENIED, 403); 50 | }); 51 | it(`should ensure it return 403 when the user have right role but is not the owner of the resource`, async () => { 52 | req.user['https://node-express-demo/roles'] = 'STORE_ADMIN'; 53 | req.user[`https://node-express-demo/email`] = 'asser@wednesday.com'; 54 | req.params = { _id: '62861b5be1897fc8b1d82360' }; 55 | req.baseUrl = '/stores'; 56 | req.route.path = '/:_id'; 57 | req.method = 'GET'; 58 | mockFunction(mocks, 'ownershipBasedAccessControl', false); 59 | await checkRole(req, res, next); 60 | expect(apiFailureMock).toBeCalledWith(res, message.ACCESS_DENIED, 403); 61 | }); 62 | 63 | it(`should call next when the user have right role and is the owner of the resource`, async () => { 64 | req.user['https://node-express-demo/roles'] = 'STORE_ADMIN'; 65 | req.user[`https://node-express-demo/email`] = 'asser@wednesday.com'; 66 | req.params = { _id: 
'62861b5be1897fc8b1d82361' }; 67 | req.baseUrl = '/stores'; 68 | req.route.path = '/:_id'; 69 | req.method = 'GET'; 70 | mockFunction(mocks, 'ownershipBasedAccessControl', true); 71 | await checkRole(req, res, next); 72 | expect(next).toBeCalled(); 73 | }); 74 | 75 | it('should call next when the user has correct role and scope to access the route', async () => { 76 | req.user['https://node-express-demo/roles'] = 'SUPER_ADMIN'; 77 | await checkRole(req, res, next); 78 | expect(next).toBeCalled(); 79 | }); 80 | 81 | it(`should return error from catch block`, async () => { 82 | const mockError = new Error('test'); 83 | req.user['https://node-express-demo/roles'] = 'STORE_ADMIN'; 84 | req.user[`https://node-express-demo/email`] = 'asser@wednesday.com'; 85 | req.params = { _id: '62861b5be1897fc8b1d82360' }; 86 | req.baseUrl = '/stores'; 87 | req.route.path = '/:_id'; 88 | req.method = 'GET'; 89 | jest.spyOn(mocks, 'ownershipBasedAccessControl').mockImplementationOnce( 90 | () => { 91 | throw mockError; 92 | } 93 | ); 94 | 95 | expect(async () => { 96 | await checkRole(req, res, next); 97 | }).rejects.toThrowError('test'); 98 | }); 99 | }); 100 | -------------------------------------------------------------------------------- /__tests__/server/middlewares/auth/ownershipBasedAccessControl.test.js: -------------------------------------------------------------------------------- 1 | import * as httpContext from 'express-http-context'; 2 | import { Stores } from 'database/models/stores'; 3 | import { Users } from 'database/models/users'; 4 | import message from 'utils/i18n/message'; 5 | import { ownershipBasedAccessControl } from 'middlewares/auth/ownershipBasedAccessControl'; 6 | 7 | describe('ownershipBasedAccessControl tests', () => { 8 | let mockingoose; 9 | beforeEach(() => { 10 | mockingoose = require('mockingoose'); 11 | }); 12 | const mockResource = { 13 | _id: '62861b5be1897fc8b1d82360', 14 | name: 'Reliance', 15 | address: 'Andheri East', 16 | admin: [ 17 | { 18 | 
firstName: 'Sourav', 19 | lastName: 'Sharma', 20 | email: 'sharma@yopmail.com', 21 | authId: 'auth|654321aerty', 22 | role: 'STORE_ADMIN' 23 | } 24 | ] 25 | }; 26 | it('return true if the requester is the owner of resource', async () => { 27 | mockingoose(Stores).toReturn(mockResource, 'findOne'); 28 | const requestorEmail = mockResource.admin[0].email; 29 | const configObj = { 30 | condition: { 'admin.email': requestorEmail }, 31 | ownerKey: 'admin' 32 | }; 33 | const result = await ownershipBasedAccessControl( 34 | requestorEmail, 35 | Stores, 36 | configObj 37 | ); 38 | expect(result).toBe(true); 39 | }); 40 | it('return true if the requester is the resource owner ', async () => { 41 | const mockUser = { 42 | _id: '62861b5be1897fc8b1d82360', 43 | firstName: 'Sourav', 44 | lastName: 'Sharma', 45 | email: 'sharma@yopmail.com', 46 | authId: 'auth|654321aerty', 47 | role: 'STORE_ADMIN' 48 | }; 49 | mockingoose(Users).toReturn(mockUser, 'findOne'); 50 | const requestorEmail = mockUser.email; 51 | const configObj = { 52 | condition: { email: requestorEmail }, 53 | ownerKey: 'email' 54 | }; 55 | const result = await ownershipBasedAccessControl( 56 | requestorEmail, 57 | Users, 58 | configObj 59 | ); 60 | expect(result).toBe(true); 61 | }); 62 | 63 | it('return true and set findAll condition if the requester is the owner of resource', async () => { 64 | mockingoose(Stores).toReturn(mockResource, 'findOne'); 65 | const requestorEmail = mockResource.admin[0].email; 66 | const configObj = { 67 | condition: { 'admin.email': requestorEmail }, 68 | ownerKey: 'admin', 69 | findAll: { key: '_id', value: null }, 70 | resourceOwnershipPath: '_id' 71 | }; 72 | const setSpy = jest.spyOn(httpContext.default, 'set'); 73 | const result = await ownershipBasedAccessControl( 74 | requestorEmail, 75 | Stores, 76 | configObj 77 | ); 78 | expect(setSpy).toBeCalledTimes(1); 79 | expect(result).toBe(true); 80 | }); 81 | 82 | it('should return false if the requestor is not the owner of 
resource', async () => { 83 | mockingoose(Stores).toReturn(mockResource, 'findOne'); 84 | const requestorEmail = 'ramesh@yopmail.com'; 85 | const configObj = { 86 | condition: { 'admin.email': requestorEmail }, 87 | ownerKey: 'admin' 88 | }; 89 | const result = await ownershipBasedAccessControl( 90 | requestorEmail, 91 | Stores, 92 | configObj 93 | ); 94 | expect(result).toBe(false); 95 | }); 96 | 97 | it('should throw error if resource is not found', async () => { 98 | mockingoose(Stores).toReturn(undefined, 'findOne'); 99 | const requestorEmail = mockResource.admin[0].email; 100 | const configObj = { 101 | condition: { 'admin.email': requestorEmail }, 102 | ownerKey: 'admin' 103 | }; 104 | expect(async () => { 105 | await ownershipBasedAccessControl( 106 | requestorEmail, 107 | Stores, 108 | configObj 109 | ); 110 | }).rejects.toThrowError(message.RESOURCE_NOT_FOUND); 111 | }); 112 | }); 113 | -------------------------------------------------------------------------------- /server/utils/routeLister.js: -------------------------------------------------------------------------------- 1 | import { isTestEnv } from 'utils'; 2 | 3 | export function list(app) { 4 | if (isTestEnv()) { 5 | return; 6 | } 7 | const path = require('path'); 8 | 9 | const defaultOptions = { 10 | prefix: '', 11 | spacer: 7 12 | }; 13 | 14 | const COLORS = { 15 | yellow: 33, 16 | green: 32, 17 | blue: 34, 18 | red: 31, 19 | grey: 90, 20 | magenta: 35, 21 | clear: 39 22 | }; 23 | 24 | const spacer = x => 25 | x > 0 ? 
[...new Array(x)].map(() => ' ').join('') : ''; 26 | 27 | const colorText = (color, string) => 28 | `\u001b[${color}m${string}\u001b[${COLORS.clear}m`; 29 | 30 | function colorMethod(method) { 31 | switch (method) { 32 | case 'POST': 33 | return colorText(COLORS.yellow, method); 34 | case 'GET': 35 | return colorText(COLORS.green, method); 36 | case 'PUT': 37 | return colorText(COLORS.blue, method); 38 | case 'DELETE': 39 | return colorText(COLORS.red, method); 40 | case 'PATCH': 41 | return colorText(COLORS.grey, method); 42 | default: 43 | return method; 44 | } 45 | } 46 | 47 | function getPathFromRegex(regexp) { 48 | return regexp 49 | .toString() 50 | .replace('/^', '') 51 | .replace('?(?=\\/|$)/i', '') 52 | .replace(/\\\//g, '/'); 53 | } 54 | 55 | function combineStacks(acc, stack) { 56 | if (stack.handle.stack) { 57 | const routerPath = getPathFromRegex(stack.regexp); 58 | return [ 59 | ...acc, 60 | ...stack.handle.stack.map(stack => ({ routerPath, ...stack })) 61 | ]; 62 | } 63 | return [...acc, stack]; 64 | } 65 | 66 | function getStacks(app) { 67 | // Express 3 68 | if (app.routes) { 69 | // convert to express 4 70 | return Object.keys(app.routes) 71 | .reduce((acc, method) => [...acc, ...app.routes[method]], []) 72 | .map(route => ({ route: { stack: [route] } })); 73 | } 74 | 75 | // Express 4 76 | if (app._router && app._router.stack) { 77 | return app._router.stack.reduce(combineStacks, []); 78 | } 79 | 80 | // Express 4 Router 81 | if (app.stack) { 82 | return app.stack.reduce(combineStacks, []); 83 | } 84 | 85 | // Express 5 86 | if (app.router && app.router.stack) { 87 | return app.router.stack.reduce(combineStacks, []); 88 | } 89 | 90 | return []; 91 | } 92 | 93 | function expressListRoutes(app, opts) { 94 | const stacks = getStacks(app); 95 | const options = { ...defaultOptions, ...opts }; 96 | 97 | if (stacks) { 98 | for (const stack of stacks) { 99 | if (stack.route) { 100 | const routeLogged = {}; 101 | for (const route of stack.route.stack) { 
102 | const method = route.method 103 | ? route.method.toUpperCase() 104 | : null; 105 | if (!routeLogged[method] && method) { 106 | const stackMethod = colorMethod(method); 107 | const stackSpace = spacer( 108 | options.spacer - method.length 109 | ); 110 | const stackPath = path.resolve( 111 | [ 112 | options.prefix, 113 | stack.routerPath, 114 | stack.route.path, 115 | route.path 116 | ] 117 | .filter(s => !!s) 118 | .join('') 119 | ); 120 | console.info(stackMethod, stackSpace, stackPath); 121 | routeLogged[method] = true; 122 | } 123 | } 124 | } 125 | } 126 | } 127 | } 128 | 129 | expressListRoutes(app); 130 | } 131 | -------------------------------------------------------------------------------- /server/api/requestGenerators.js: -------------------------------------------------------------------------------- 1 | import { 2 | deleteItem, 3 | fetchItem, 4 | fetchItems, 5 | createItem, 6 | updateItem, 7 | createUser 8 | } from 'api/utils'; 9 | import { 10 | apiFailure, 11 | apiSuccess, 12 | createValidatorMiddlewares 13 | } from 'utils/apiUtils'; 14 | import { clientCredentialsGrant, managementClient } from 'utils/auth0'; 15 | import { REQUEST_TYPES } from './customApisMapper'; 16 | import config from 'config'; 17 | import { validateObjectId, validateReqBody, validateSchema } from 'utils'; 18 | 19 | export const generateRequest = (type, router, model, validator) => { 20 | const middlewares = [...createValidatorMiddlewares(validator)]; 21 | switch (type) { 22 | case REQUEST_TYPES.create: 23 | middlewares.push(validateSchema(model)); 24 | generatePostRequest({ router, model, middlewares }); 25 | break; 26 | case REQUEST_TYPES.update: 27 | middlewares.push(validateObjectId, validateReqBody(model)); 28 | generatePatchRequest({ router, model, middlewares }); 29 | break; 30 | case REQUEST_TYPES.fetchOne: 31 | middlewares.push(validateObjectId); 32 | generateFetchOneRequest({ router, model, middlewares }); 33 | break; 34 | case REQUEST_TYPES.fetchAll: 35 | 
generateFetchAllRequest({ router, model, middlewares }); 36 | break; 37 | case REQUEST_TYPES.remove: 38 | middlewares.push(validateObjectId); 39 | generateDeleteRequest({ router, model, middlewares }); 40 | break; 41 | } 42 | }; 43 | export const generatePostRequest = ({ router, model, middlewares }) => { 44 | router.post('/', ...middlewares, async (req, res) => { 45 | try { 46 | const item = await createItem(model, req.body); 47 | return apiSuccess(res, item); 48 | } catch (err) { 49 | return apiFailure(res, err.message); 50 | } 51 | }); 52 | }; 53 | export const generatePatchRequest = ({ router, model, middlewares }) => { 54 | router.patch('/:_id', ...middlewares, (req, res, next) => { 55 | const { _id } = req.params; 56 | return updateItem(model, { _id }, req.body) 57 | .then(items => apiSuccess(res, items)) 58 | .catch(err => apiFailure(res, err.message)); 59 | }); 60 | }; 61 | 62 | export const generateFetchAllRequest = ({ 63 | router, 64 | model, 65 | middlewares = [] 66 | }) => { 67 | router.get('/', ...middlewares, async (req, res, next) => 68 | fetchItems(model, req.query) 69 | .then(items => apiSuccess(res, items)) 70 | .catch(err => apiFailure(res, err.message)) 71 | ); 72 | }; 73 | export const generateFetchOneRequest = ({ router, model, middlewares }) => { 74 | router.get('/:_id', ...middlewares, async (req, res, next) => { 75 | const { _id } = req.params; 76 | return fetchItem(model, { _id }) 77 | .then(item => apiSuccess(res, item)) 78 | .catch(err => apiFailure(res, err.message)); 79 | }); 80 | }; 81 | 82 | export const generateDeleteRequest = ({ router, model, middlewares }) => { 83 | router.delete('/:_id', ...middlewares, (req, res, next) => { 84 | const { _id } = req.params; 85 | return deleteItem(model, { _id }) 86 | .then(items => apiSuccess(res, items)) 87 | .catch(err => apiFailure(res, err.message)); 88 | }); 89 | }; 90 | 91 | export const generateCreateUserRequest = ({ router, model, validator }) => { 92 | const middlewares = 
// Auto-generated by the postman-to-k6 converter

import './libs/shim/core.js';
import './libs/shim/urijs.js';
import { check, group } from 'k6';
import http from 'k6/http';

export const options = {
    stages: [
        { duration: '10s', target: 100 }, // ramp-up of traffic from 1 to 100 users over 10 seconds
        { duration: '10s', target: 100 }, // stay at 100 users for 10 seconds
        { duration: '10s', target: 0 } // ramp-down to 0 users
    ],
    thresholds: {
        http_req_duration: ['p(99)<2000'], // 99% of requests must complete below 2s
        http_req_blocked: ['p(99)<1'],
        http_req_waiting: ['p(99)<2000'],
        http_req_failed: ['rate<0.01'] // http errors should be less than 1%
    }
};

const baseUrl = 'http://localhost:9000';
const Request = Symbol.for('request');
postman[Symbol.for('initial')]({
    options,
    environment: {
        baseUrl
    }
});

// Runs once before the load test: log in and share the access token with all VUs.
export function setup() {
    let res = http.post(`${baseUrl}/login`, {
        username: 'rushabh@wednesday.is',
        password: 'Wednesday@123'
    });
    check(res, {
        'status is 200': r => r.status === 200
    });
    return { accessToken: res.json().data.access_token };
}
export default function (data) {
    group('Orders', function () {
        postman[Request]({
            name: 'Create Order',
            id: '71eb778d-0a8f-4798-805b-8e922ec3f16f',
            method: 'POST',
            address: '{{baseUrl}}/orders',
            data: '{\n "purchasedProducts": [\n {\n "name": "Handcrafted Steel Keyboard",\n "price": 39000,\n "category": "Computers",\n "totalSales": 0,\n "quantityAverage": 0,\n "schema": 1\n },\n {\n "name": "Incredible Granite Soap",\n "price": 15400,\n "category": "Garden",\n "totalSales": 0,\n "quantityAverage": 0,\n "schema": 1\n }\n ],\n "totalPrice": 54400\n}',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `bearer ${data.accessToken}`
            },
            post(response) {
                pm.test('Status code is 200', function () {
                    pm.response.to.have.status(200);
                    var jsonData = JSON.parse(responseBody);
                    postman.setEnvironmentVariable(
                        'order_id',
                        jsonData.data._id
                    );
                });
            }
        });

        postman[Request]({
            name: 'Fetch Order by _id',
            id: '2518c9ce-30f1-4f76-9025-403149767ca4',
            method: 'GET',
            address: '{{baseUrl}}/orders/{{order_id}}',
            headers: {
                Authorization: `bearer ${data.accessToken}`,
                'Content-Type': 'application/json'
            }
        });

        postman[Request]({
            name: 'Fetch Orders',
            id: 'adac227c-3609-4667-902f-abc435adfa9f',
            method: 'GET',
            address: '{{baseUrl}}/orders?limit=99&page=1',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `bearer ${data.accessToken}`
            }
        });

        postman[Request]({
            name: 'Fetch Referenced Orders',
            id: '3da304b8-67e7-416b-a63b-778f31ad9c29',
            method: 'GET',
            address: '{{baseUrl}}/referenced-orders?limit=99&page=1',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `bearer ${data.accessToken}`
            }
        });

        postman[Request]({
            name: 'Fetch Unsharded Orders',
            id: 'c55265c4-2fd1-4252-83a4-163c6ea955b0',
            method: 'GET',
            address: '{{baseUrl}}/unsharded-orders?limit=99&page=1',
            headers: {
                Authorization: `bearer ${data.accessToken}`,
                'Content-Type': 'application/json'
            }
        });

        postman[Request]({
            name: 'Fetch Unsharded Referenced Orders',
            id: 'b1ed181e-ce92-4e90-a986-293ee5670f4e',
            method: 'GET',
            address: '{{baseUrl}}/unsharded-referenced-orders?limit=99&page=1',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `bearer ${data.accessToken}`
            }
        });
    });
}
dist/main.js", 10 | "build:env": "webpack-cli --config webpack/production.config.js --stats-error-details", 11 | "build:dev": "webpack-cli --config webpack/dev.config.js --stats-error-details", 12 | "start:local": "ENVIRONMENT_NAME=local && yarn build:dev && yarn start", 13 | "start:sharded": "export MONGO_PORT=60000 && yarn start", 14 | "postinstall": "link-module-alias", 15 | "preinstall": "source /usr/local/opt/nvm/nvm.sh; nvm use; command -v link-module-alias && link-module-alias clean || true", 16 | "format": "prettier-standard './**/**/*.js'", 17 | "lint": "npm run lint:js", 18 | "lint:eslint": "eslint --ignore-path .eslintignore --ignore-pattern server/bin", 19 | "lint:eslint:fix": "eslint --ignore-path .eslintignore --ignore-pattern server/bin --fix", 20 | "lint:js": "npm run lint:eslint -- . ", 21 | "lint:staged": "lint-staged", 22 | "test:badges": "npm run test && jest-coverage-badges --output './badges'", 23 | "prettify": "prettier --write", 24 | "precommit": "lint:staged" 25 | }, 26 | "repository": { 27 | "type": "git", 28 | "url": "git+https://github.com/wednesday-solutions/node-mongo-express.git" 29 | }, 30 | "keywords": [ 31 | "node", 32 | "express", 33 | "mongo", 34 | "mongoose", 35 | "mongodb", 36 | "node-express", 37 | "node-express-mongo" 38 | ], 39 | "author": "Wednesday Solutions", 40 | "license": "MIT", 41 | "engines": { 42 | "node": "14.17.x" 43 | }, 44 | "bugs": { 45 | "url": "https://github.com/wednesday-solutions/node-mongo-express/issues" 46 | }, 47 | "homepage": "https://github.com/wednesday-solutions/node-mongo-express#readme", 48 | "dependencies": { 49 | "auth0": "^2.40.0", 50 | "body-parser": "^1.19.1", 51 | "bull": "^4.8.1", 52 | "bunyan": "^1.8.15", 53 | "cors": "^2.8.5", 54 | "dotenv": "^16.0.0", 55 | "express": "^4.17.2", 56 | "express-http-context": "^1.2.4", 57 | "express-jwt": "^6.1.1", 58 | "express-rate-limit": "^6.3.0", 59 | "express-validator": "^6.14.0", 60 | "helmet": "^5.0.1", 61 | "husky": "^8.0.1", 62 | "i18n": 
"^0.15.1", 63 | "ioredis": "^5.0.5", 64 | "jwks-rsa": "^2.0.5", 65 | "kill-port": "^1.6.1", 66 | "lodash": "^4.17.21", 67 | "moment": "^2.29.1", 68 | "mongoose": "6.2.4", 69 | "mongoose-to-swagger": "^1.4.0", 70 | "node-fetch": "2", 71 | "nodemon": "^2.0.15", 72 | "opossum": "^6.3.0", 73 | "pluralize": "^8.0.0", 74 | "response-time": "^2.3.2", 75 | "slack-notify": "^2.0.2", 76 | "swagger-ui-express": "^4.3.0", 77 | "uuid": "^8.3.2", 78 | "webpack": "^5.74.0", 79 | "webpack-hot-middleware": "^2.25.2" 80 | }, 81 | "devDependencies": { 82 | "@babel/core": "^7.16.7", 83 | "@babel/eslint-parser": "^7.18.2", 84 | "@babel/node": "^7.16.7", 85 | "@babel/plugin-transform-runtime": "^7.18.10", 86 | "@babel/preset-env": "^7.18.2", 87 | "@faker-js/faker": "^6.0.0-alpha.5", 88 | "babel-loader": "^8.2.5", 89 | "eslint": "^8.6.0", 90 | "eslint-config-prettier": "^8.5.0", 91 | "eslint-plugin-prettier": "^4.0.0", 92 | "file-loader": "^6.2.0", 93 | "jest": "^27.4.7", 94 | "jest-coverage-badges": "^1.1.2", 95 | "link-module-alias": "^1.2.0", 96 | "mockingoose": "^2.15.2", 97 | "pre-commit": "^1.2.2", 98 | "prettier": "^2.6.2", 99 | "prettier-standard": "^16.4.1", 100 | "regenerator-runtime": "^0.13.9", 101 | "supertest": "^6.2.2", 102 | "terser-webpack-plugin": "^5.3.6", 103 | "webpack-cli": "^4.10.0" 104 | }, 105 | "precommit": "lint:staged", 106 | "lint-staged": { 107 | "*.js": [ 108 | "npm run lint:eslint:fix", 109 | "git add --force", 110 | "jest --findRelatedTests $STAGED_FILES" 111 | ], 112 | "*.json": [ 113 | "prettier --write", 114 | "git add --force" 115 | ] 116 | }, 117 | "husky": { 118 | "hooks": { 119 | "pre-commit": "pretty-quick --staged" 120 | } 121 | }, 122 | "_moduleAliases": { 123 | "utils": "./server/utils", 124 | "middlewares": "./server/middlewares", 125 | "server": "./server", 126 | "api": "./server/api", 127 | "config": "./config", 128 | "services": "./server/services", 129 | "database": "./server/database", 130 | "daos": "./server/daos" 131 | } 132 | } 133 | 
import moment from 'moment';
import { Orders } from 'database/models/orders';

// Projection shared by every daily aggregation below; keeps only the
// fields the pipelines actually read.
const PROJECT_STAGE = {
    $project: {
        _id: 1,
        totalPrice: 1,
        purchasedProducts: 1,
        createdAt: 1
    }
};

// Produces one document per purchased product of an order.
const UNWIND_PRODUCTS_STAGE = {
    $unwind: {
        path: '$purchasedProducts',
        preserveNullAndEmptyArrays: true
    }
};

/**
 * Build a $match stage selecting documents created on the calendar day of
 * `date`, optionally combined with extra criteria.
 * moment#toDate keeps full millisecond precision: `new Date(momentObject)`
 * round-trips through a string and silently drops the `.999` of
 * endOf('day'), excluding orders created in the last second of the day.
 */
const matchDay = (date, extraCriteria = {}) => ({
    $match: {
        ...extraCriteria,
        createdAt: {
            $gte: moment(date).startOf('day').toDate(),
            $lte: moment(date).endOf('day').toDate()
        }
    }
});

/**
 * Persist a new order.
 * (The previous try/catch was dead code: the promise was returned without
 * `await`, so rejections never reached the catch. Errors now simply
 * propagate to the caller, same as before.)
 * @param {object} orderData - fields matching the Orders schema.
 * @returns {Promise<object>} the created order document.
 */
export const createNewOrder = async orderData => Orders.create(orderData);

/**
 * Sum of `totalPrice` over all orders created on `date`.
 * @param {string|Date} date - any value moment can parse.
 * @returns {Promise<number>} total amount; 0 when no order exists that day.
 */
export const totalAmtForDate = async date => {
    const data = await Orders.aggregate([
        PROJECT_STAGE,
        matchDay(date),
        {
            $group: {
                _id: null,
                totalPrice: { $sum: '$totalPrice' }
            }
        }
    ]).exec();
    return data[0]?.totalPrice || 0;
};

/**
 * Date string (YYYY-MM-DD, UTC) of the earliest order.
 * NOTE(review): throws a TypeError when the collection is empty — callers
 * appear to rely on seeded data; confirm before hardening.
 * @returns {Promise<string>}
 */
export const earliestCreatedDate = async () => {
    const order = await Orders.findOne().sort({ createdAt: 1 }).limit(1);
    return order.createdAt.toISOString().split('T')[0];
};

/**
 * Sum of `totalPrice` of orders created on `date` containing at least one
 * product of `category`; each order counted once even when it holds several
 * products of that category.
 * @returns {Promise<number>} total amount; 0 when nothing matches.
 */
export const totalByDateForCategory = async (date, category) => {
    const data = await Orders.aggregate([
        PROJECT_STAGE,
        UNWIND_PRODUCTS_STAGE,
        matchDay(date, { 'purchasedProducts.category': category }),
        // Collapse the unwound products back to one row per order so an
        // order's totalPrice is not summed multiple times.
        {
            $group: {
                _id: '$_id',
                totalPrice: { $first: '$totalPrice' }
            }
        },
        {
            $group: {
                _id: null,
                totalPrice: { $sum: '$totalPrice' }
            }
        }
    ]).exec();
    return data[0]?.totalPrice || 0;
};

/**
 * Number of orders created on `date`.
 * @returns {Promise<number>} count; 0 when nothing matches.
 */
export const countByDate = async date => {
    const totalOrder = await Orders.aggregate([
        PROJECT_STAGE,
        matchDay(date),
        { $count: 'totalOrder' }
    ]).exec();
    return totalOrder[0]?.totalOrder || 0;
};

/**
 * Number of orders created on `date` containing at least one product of
 * `category` (each order counted once).
 * @returns {Promise<number>} count; 0 when nothing matches.
 */
export const countByDateForCategory = async (date, category) => {
    const totalCount = await Orders.aggregate([
        PROJECT_STAGE,
        UNWIND_PRODUCTS_STAGE,
        matchDay(date, { 'purchasedProducts.category': category }),
        // De-duplicate orders before counting.
        { $group: { _id: '$_id' } },
        { $count: 'totalOrder' }
    ]).exec();
    return totalCount[0]?.totalOrder || 0;
};
=> ({ 37 | name: faker.company.companyName(), 38 | address: faker.address.streetAddress(true) 39 | }) 40 | ); 41 | const stores = await Stores.insertMany(seedStores, { 42 | writeConcern: { w: 0 } 43 | }); 44 | console.log( 45 | '------------------------------------\nSeeding suppliers' 46 | ); 47 | const seedSuppliers = range(0, 200 / divisor).map( 48 | (value, index) => ({ 49 | name: faker.company.companyName() 50 | }) 51 | ); 52 | const suppliers = await Suppliers.insertMany(seedSuppliers, { 53 | writeConcern: { w: 0 } 54 | }); 55 | 56 | const seedStoreProducts = []; 57 | const seedSupplierProducts = []; 58 | products.forEach(product => { 59 | const storeIndex = random(0, stores.length - 1); 60 | const supplierIndex = random(0, suppliers.length - 1); 61 | seedStoreProducts.push({ 62 | productId: product._id, 63 | product, 64 | store: stores[storeIndex], 65 | storeId: stores[storeIndex]._id 66 | }); 67 | const supplier = suppliers[supplierIndex]; 68 | seedSupplierProducts.push({ 69 | productId: product._id, 70 | product, 71 | supplier: supplier, 72 | supplierId: supplier._id 73 | }); 74 | }); 75 | 76 | console.log( 77 | '------------------------------------\nSeeding seedStoreProducts and seedSupplierProducts' 78 | ); 79 | StoreProducts.insertMany(seedStoreProducts, { 80 | writeConcern: { w: 0 } 81 | }); 82 | SupplierProducts.insertMany(seedSupplierProducts, { 83 | writeConcern: { w: 0 } 84 | }); 85 | 86 | const seedOrders = []; 87 | console.log( 88 | '------------------------------------\nSeeding orders' 89 | ); 90 | await Promise.all( 91 | range(0, 2500 / divisor).map(async orderIndex => { 92 | const order = {}; 93 | const purchasedProducts = []; 94 | const numberOfProducts = random(1, 6); 95 | products.forEach(product => { 96 | let qty = Math.floor(Math.random() * 3); 97 | product['quantity'] = qty ? 
import {
    countByDate,
    countByDateForCategory,
    createNewOrder,
    earliestCreatedDate,
    totalAmtForDate,
    totalByDateForCategory
} from 'daos/order';
import { Orders } from 'database/models/orders';
import { mockData } from 'utils/mockData';
import moment from 'moment';
const {
    MOCK_TOTAL_AMT: mockTotalAmt,
    MOCK_TOTAL_COUNT: mockTotalCount,
    MOCK_ORDER: mockOrder,
    MOCK_ORDER_DETAILS: mockOrderDetails
} = mockData;
describe('Order daos tests', () => {
    const date = '1994-10-24';
    const model = 'orders';
    const category = 'Sports';
    const mockError = new Error('Mock Error');
    let mockingoose;
    beforeEach(() => {
        mockingoose = require('mockingoose');
    });

    describe('createNewOrder function test', () => {
        it('should ensure it return 200 and create a new order', async () => {
            jest.spyOn(Orders, 'create').mockImplementationOnce(
                () => mockOrderDetails
            );
            const res = await createNewOrder(mockOrder);
            expect(res).toBe(mockOrderDetails);
        });

        it('should throw an error from catch block', async () => {
            jest.spyOn(Orders, 'create').mockImplementationOnce(() => {
                throw mockError;
            });
            // `rejects` assertions must be awaited, otherwise a failing
            // assertion becomes a floating promise and never fails the test.
            await expect(createNewOrder(mockOrder)).rejects.toThrow(mockError);
        });
    });
    describe('totalAmtForDate function tests', () => {
        it('should return total amount for the day', async () => {
            mockingoose(model).toReturn(mockTotalAmt, 'aggregate');
            const res = await totalAmtForDate(date);
            expect(res).toBe(mockTotalAmt[0].totalPrice);
        });

        it('should throw an error if an error is thrown from db', async () => {
            mockingoose(model).toReturn(mockError, 'aggregate');
            await expect(totalAmtForDate(date)).rejects.toThrow(mockError);
        });
        it('should return total amount as 0 for the day as no order is placed on that day', async () => {
            mockingoose(model).toReturn([], 'aggregate');
            const res = await totalAmtForDate(date);
            expect(res).toBe(0);
        });
    });

    describe('earliestCreatedDate functions', () => {
        it('should return the date of the first order', async () => {
            mockingoose(model).toReturn({}, 'findOne');
            const res = await earliestCreatedDate();
            expect(res).toBe(moment.utc().format('YYYY-MM-DD'));
        });
        it('should throw an error if an error is thrown from db', async () => {
            mockingoose(model).toReturn(mockError, 'findOne');
            await expect(earliestCreatedDate()).rejects.toThrow(mockError);
        });
    });

    describe('totalByDateForCategory functions tests', () => {
        it('should return total amt for the day for a category', async () => {
            mockingoose(model).toReturn(mockTotalAmt, 'aggregate');
            const res = await totalByDateForCategory(date, category);
            expect(res).toBe(mockTotalAmt[0].totalPrice);
        });

        it('should throw an error if an error is thrown from db', async () => {
            mockingoose(model).toReturn(mockError, 'aggregate');
            await expect(totalByDateForCategory(date, category)).rejects.toThrow(
                mockError
            );
        });
        it('should return total amt as 0 for the day for a category as no order is placed for that category on that day', async () => {
            mockingoose(model).toReturn([], 'aggregate');
            const res = await totalByDateForCategory(date, category);
            expect(res).toBe(0);
        });
    });

    describe('countByDate funnction tests', () => {
        it('should return the total count of order for the day', async () => {
            mockingoose(model).toReturn(mockTotalCount, 'aggregate');
            const res = await countByDate(date);
            expect(res).toBe(mockTotalCount[0].totalOrder);
        });

        it('should throw an error if db throws an error', async () => {
            mockingoose(model).toReturn(mockError, 'aggregate');
            await expect(countByDate(date)).rejects.toThrow(mockError);
        });

        it('should return the total count of order as 0 for the day as no order is placed on that date', async () => {
            mockingoose(model).toReturn([], 'aggregate');
            const res = await countByDate(date);
            expect(res).toBe(0);
        });
    });

    describe('countByDateForCategory functions tests', () => {
        it('should return count for the day for a category', async () => {
            mockingoose(model).toReturn(mockTotalCount, 'aggregate');
            const res = await countByDateForCategory(date, category);
            expect(res).toBe(mockTotalCount[0].totalOrder);
        });

        it('should throw an error if an error is thrown from db ', async () => {
            mockingoose(model).toReturn(mockError, 'aggregate');
            await expect(countByDateForCategory(date, category)).rejects.toThrow(
                mockError
            );
        });

        it('should return count for the day for a category as 0 as no order was placed for that category on that day', async () => {
            mockingoose(model).toReturn([], 'aggregate');
            const res = await countByDateForCategory(date, category);
            expect(res).toBe(0);
        });
    });
});
SCOPE_TYPE.STORE_ADMIN], 70 | method: 'PATCH', 71 | hasCustomAuth: true 72 | }, 73 | 74 | { 75 | path: '/stores/:_id', 76 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.STORE_ADMIN], 77 | method: 'DELETE', 78 | hasCustomAuth: true 79 | }, 80 | { 81 | path: '/store-products', 82 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.STORE_ADMIN], 83 | method: 'POST', 84 | hasCustomAuth: true 85 | }, 86 | { 87 | path: '/store-products', 88 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.STORE_ADMIN], 89 | method: 'GET', 90 | hasCustomAuth: true 91 | }, 92 | { 93 | path: '/store-products/:_id', 94 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.STORE_ADMIN], 95 | method: 'GET', 96 | hasCustomAuth: true 97 | }, 98 | { 99 | path: '/store-products/:_id', 100 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.STORE_ADMIN], 101 | method: 'DELETE', 102 | hasCustomAuth: true 103 | }, 104 | { 105 | path: '/suppliers', 106 | scopes: [SCOPE_TYPE.SUPER_ADMIN], 107 | method: 'POST' 108 | }, 109 | { 110 | path: '/suppliers/:_id', 111 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.SUPPLIER_ADMIN], 112 | method: 'GET', 113 | hasCustomAuth: true 114 | }, 115 | { 116 | path: '/suppliers', 117 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.SUPPLIER_ADMIN], 118 | method: 'GET', 119 | hasCustomAuth: true 120 | }, 121 | { 122 | path: '/suppliers/:_id', 123 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.SUPPLIER_ADMIN], 124 | method: 'PATCH', 125 | hasCustomAuth: true 126 | }, 127 | { 128 | path: '/suppliers/:_id', 129 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.SUPPLIER_ADMIN], 130 | method: 'DELETE', 131 | hasCustomAuth: true 132 | }, 133 | { 134 | path: '/supplier-products', 135 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.SUPPLIER_ADMIN], 136 | method: 'POST', 137 | hasCustomAuth: true 138 | }, 139 | { 140 | path: '/supplier-products', 141 | scopes: [SCOPE_TYPE.SUPER_ADMIN, SCOPE_TYPE.SUPPLIER_ADMIN], 142 | method: 'GET', 143 | hasCustomAuth: true 144 | }, 145 | { 146 | path: '/supplier-products/:_id', 
import path from 'path';
import pluralize from 'pluralize';
import m2s from 'mongoose-to-swagger';
import kebabCase from 'lodash/kebabCase';
import swaggerUi from 'swagger-ui-express';
import { REQUEST_TYPES } from 'api/customApisMapper';
import Pack from '../../package.json';
import { getModelFiles } from '.';
import customSwaggerDoc from '../../swagger.json';

// HTTP verb emitted for each auto-generated request type.
export const REQUEST_METHODS = {
    [REQUEST_TYPES.create]: 'post',
    [REQUEST_TYPES.update]: 'patch',
    [REQUEST_TYPES.fetchOne]: 'get',
    [REQUEST_TYPES.fetchAll]: 'get',
    [REQUEST_TYPES.remove]: 'delete'
};
export const SWAGGER_DOCS_PATH = '/api-docs/swagger.json';

// Definitions included for every model (shape of the DELETE response body).
export const DEFAULT_DEFINITIONS = {
    deleteResponse: {
        type: 'object',
        properties: {
            deletedCount: {
                type: 'integer',
                format: 'int64',
                example: 1
            }
        }
    }
};

/**
 * @typedef CustomSwagger
 * @type {object}
 * @property {Array|object} tags
 * @property {object} paths
 * @property {object} definitions
 */

/**
 * Generate the swagger document from the mongoose models, merge the
 * handwritten swagger.json into it, and mount both the raw JSON endpoint
 * and the swagger-ui on the express app.
 * @param {any} app - express application.
 */
export const registerSwagger = app => {
    const options = {
        swaggerOptions: {
            url: SWAGGER_DOCS_PATH
        }
    };
    const swaggerDocument = generateSwaggerDoc();
    appendToSwaggerDoc(swaggerDocument, customSwaggerDoc);
    app.get(SWAGGER_DOCS_PATH, (_, res) => res.json(swaggerDocument));
    app.use(
        '/api-docs',
        swaggerUi.serveFiles(null, options),
        swaggerUi.setup(null, options)
    );
};

/**
 * Build a Swagger 2.0 document covering every model file found in
 * server/database/models: one tag, one set of paths and one definition per
 * model.
 * @returns {object} the assembled swagger document.
 */
export const generateSwaggerDoc = () => {
    const swaggerDocument = {
        swagger: '2.0',
        info: {
            title: 'Node Mongo Express Documentation',
            version: Pack.version
        },
        tags: [],
        paths: {},
        definitions: {}
    };
    const modelsFolderPath = path.join(
        __dirname,
        '../../server/database/models/'
    );
    const fileArray = getModelFiles(modelsFolderPath);
    fileArray.forEach(f => {
        // eslint-disable-next-line prefer-template
        const { model } = require('server/database/models/' + f);
        const name = f.split('.')[0];

        const { swaggerPaths, swaggerDefs } = swagGeneratorFactory(name, model);
        appendToSwaggerDoc(swaggerDocument, {
            paths: swaggerPaths,
            definitions: swaggerDefs,
            tags: {
                name,
                description: `${name} related endpoints`
            }
        });
    });
    return swaggerDocument;
};

/**
 * Merge tags, paths and definitions of `swaggerData` into `swaggerDocument`
 * (in place). Later paths/definitions win on key collision.
 * @param {any} swaggerDocument
 * @param {CustomSwagger} swaggerData
 */
export const appendToSwaggerDoc = (swaggerDocument, swaggerData) => {
    const { paths, definitions, tags } = swaggerData;
    if (Array.isArray(tags)) {
        swaggerDocument.tags.push(...tags);
    } else {
        swaggerDocument.tags.push(tags);
    }
    swaggerDocument.paths = {
        ...swaggerDocument.paths,
        ...paths
    };
    swaggerDocument.definitions = {
        ...swaggerDocument.definitions,
        ...definitions
    };
};

/**
 * Build the swagger paths and definitions for one model.
 * @param {string} name - model name (file name without extension).
 * @param {any} model - mongoose model.
 * @returns {{swaggerPaths: object, swaggerDefs: object}}
 */
export const swagGeneratorFactory = (name, model) => {
    const swaggerPaths = {};
    const swaggerDefs = {
        ...DEFAULT_DEFINITIONS
    };
    appendSwagDefs(name, model, swaggerDefs);
    Object.values(REQUEST_TYPES).forEach(type =>
        appendSwagPaths(type, name, swaggerPaths)
    );
    return { swaggerPaths, swaggerDefs };
};

/**
 * Append the swagger path entry for one request type of one model.
 * Orders are created through a custom route, so the generated `create`
 * entry is skipped for them.
 * @param {string} type - one of REQUEST_TYPES.
 * @param {string} name - model name.
 * @param {object} swaggerPaths - accumulator, mutated in place.
 */
export const appendSwagPaths = (type, name, swaggerPaths) => {
    if (type === REQUEST_TYPES.create && name === 'orders') {
        return;
    }
    const routeName = `/${kebabCase(name)}`;
    const method = REQUEST_METHODS[type];
    const lowerType = type.toLowerCase();
    const isPluralEntity = type === REQUEST_TYPES.fetchAll;
    const hasPathParam = ![
        REQUEST_TYPES.create,
        REQUEST_TYPES.fetchAll
    ].includes(type);
    const entityName = isPluralEntity ? name : pluralize.singular(name);
    const summary = `${lowerType} ${entityName}`;
    // Swagger 2.0 requires `parameters` to be an array; `{}` was invalid
    // for the parameter-less operations.
    const parameters = hasPathParam
        ? [
              {
                  name: '_id',
                  in: 'path',
                  description: `ID of ${pluralize.singular(
                      name
                  )} to ${lowerType}`,
                  required: true,
                  type: 'string'
              }
          ]
        : [];
    // Schema of the `data` field in a successful response.
    let successDataSchema;
    if (isPluralEntity) {
        successDataSchema = {
            type: 'array',
            items: { $ref: `#/definitions/${name}` }
        };
    } else if (type === REQUEST_TYPES.remove) {
        successDataSchema = { $ref: '#/definitions/deleteResponse' };
    } else {
        successDataSchema = { $ref: `#/definitions/${name}` };
    }
    const responses = {
        200: {
            type: 'object',
            description: `${lowerType} ${entityName} is success`,
            schema: {
                type: 'object',
                properties: {
                    data: successDataSchema
                }
            }
        },
        400: {
            type: 'object',
            description: `${lowerType} ${entityName} is failed`,
            schema: {
                type: 'object',
                required: ['error'],
                properties: {
                    error: {
                        type: 'string',
                        example: `unable to ${lowerType} ${entityName}`
                    }
                }
            }
        }
    };
    const pathKey = !hasPathParam ? routeName : `${routeName}/{_id}`;
    swaggerPaths[pathKey] = {
        ...(swaggerPaths[pathKey] || {}),
        [method]: {
            tags: [name],
            summary,
            produces: ['application/json'],
            parameters,
            responses
        }
    };
};

/**
 * Append the swagger definition for one model, derived from its mongoose
 * schema via mongoose-to-swagger.
 * @param {string} name - model name used as the definition key.
 * @param {any} model - mongoose model.
 * @param {object} swaggerDefs - accumulator, mutated in place.
 */
export const appendSwagDefs = (name, model, swaggerDefs) => {
    const modelSchema = m2s(model);
    // modify model schema properties here
    if (modelSchema.properties.purchasedProducts) {
        modelSchema.properties.purchasedProducts = {
            $ref: '#/definitions/products'
        };
    }
    swaggerDefs[name] = {
        type: 'object',
        ...modelSchema,
        title: undefined
    };
};
4 | 5 | 6 | 7 |

8 |

Node Mongo Express 9 |

10 |

11 | 12 |

13 | An enterprise Mongo-Express REST API built using nodejs showcasing - Testing Strategy, mongoDB sharding, models, a REST API Interface, support for Redis, aggregation queries, aggregation caching, circuit-breakers, slack integration, RBAC, rate limited APIs and multi-container queues and schedulers. 14 |

15 | 16 | --- 17 | 18 |

19 |

20 | Expert teams of digital product strategists, developers, and designers. 21 |

22 |

23 | 24 | 32 | 33 | --- 34 | 35 | We’re always looking for people who value their work, so come and join us. We are hiring! 36 | 37 |
38 | 39 | 43 | 44 | [![Node Mongo Express CI](https://github.com/wednesday-solutions/node-mongo-express/actions/workflows/ci.yml/badge.svg)](https://github.com/wednesday-solutions/node-mongo-express/actions/workflows/ci.yml) 45 | 46 | [![Node Mongo Express CD](https://github.com/wednesday-solutions/node-mongo-express/actions/workflows/cd.yml/badge.svg)](https://github.com/wednesday-solutions/node-mongo-express/actions/workflows/cd.yml) 47 | 48 | --- 49 | 50 |
51 | 52 | 53 |
54 |
55 | 56 | 57 |
58 | 59 | --- 60 | 61 | ## Pre-requisites 62 | 63 | - yarn 64 | - docker 65 | 66 | ## Features 67 | 68 | - Mongo support 69 | - Docker support 70 | - Rate limited APIs 71 | - RBAC middleware using Auth0 72 | - Sharding mongoDB collection support 73 | - Paginated APIs 74 | - Autogenerated APIs from mongoose models 75 | - Built in slack alerting mechanism 76 | - Support for redis cache 77 | - Support for aggregate caching 78 | - Support for batch jobs in multi-container environment 79 | - Support for circuit breakers 80 | - Autogenerated swagger documentation 81 | - Load testing using k6 82 | - Support for i18n 83 | 84 | ## Running Load tests 85 | 86 | - [Install](https://k6.io/docs/getting-started/installation/) k6 87 | - Execute the following command: `k6 run __tests__/__load__/script.js` 88 | ## Build and run docker container locally 89 | 90 | - docker-compose down 91 | - docker-compose build 92 | - docker-compose up 93 | 94 | # Shard setup 95 | 96 | Run the following script 97 | 98 | ``` 99 | ./setup-shards/scripts/setup/base.sh 100 | ``` 101 | 102 | Take a look at [this](./setup-shards/README.md) to create shards and replica sets. 
103 | 104 | ## Seeders 105 | 106 | Run the following command to begin seeding 107 | 108 | ``` 109 | ./seeders/seed.sh 110 | ``` 111 | 112 | ## How to start 113 | 114 | - cd `node-mongo-express` 115 | - yarn 116 | - ./setup-shards/scripts/setup/base.sh 117 | - cp .env.example .env.local 118 | - ./seeders/seed.sh 119 | - yarn start 120 | - open browser to `localhost:9000` (the port defaults to 9000) 121 | 122 | ## API Documentation 123 | 124 | Once you have the server started, check out the API documentation at [/api-docs](http://localhost:9000/api-docs) 125 | 126 | ## Navigating the code base 127 | 128 | - The entry point of the application is the [server/index.js](./server/index.js) 129 | - The server/app.js imports the APIs from [server/api/index.js](./server/api/index.js) 130 | - All the different APIs in the [server/api](./server/api) are registered [here](./server/api/index.js) 131 | - MongoDB is used to store data & mongoose is used as the ODM 132 | - [mongo](./server/database/mongo.js) 133 | - [models](./server/database/models/) 134 | - The template has support for the following middlewares 135 | - [auth](./server/middlewares/auth/) 136 | - [injectRequestId](./server/middlewares/injectRequestId) 137 | - [rateLimiter](./server/middlewares/rateLimiter) 138 | - The template has inbuilt support for 139 | - [redis](./server/services/redis.js) 140 | - [circuitBreakers](./server/services/circuitBreaker.js) 141 | - [slack alerts](./server/utils/slackNotify.js) 142 | - [docker](./Dockerfile) 143 | - [docker-compose](./docker-compose.yml) 144 | - [auto generated apis](./server/api/requestGenerators.js) 145 | - [sharding of collections](./setup-shards) 146 | - [aggregate caching](./server/api/aggregate/) 147 | 148 | ## Philosophy 149 | 150 | When using NoSQLs you are optimising for read performance. We're doing this by denormalising data. There are multiple copies of the same data. For example 151 | 152 | - Orders contains purchasedProducts which contains Products. 
Instead of referencing here we embed 153 | - SupplierProducts contains embedded objects for both Suppliers and Products 154 | - StoreProducts contains embedded objects for both Stores and Products 155 | 156 | This makes our application write heavy. Every time there is a change to a product we need to make a change to 157 | 158 | - SupplierProducts 159 | - StoreProducts 160 | - Products 161 | 162 | Orders is not impacted since a change in the product after purchase will not affect the order. 163 | 164 | However, the application is able to perform extremely fast reads. Two reasons for the better performance are 165 | 166 | - shards 167 | - document embedding 168 | 169 | NoSQLs are also good for handling large volumes of data. This is supported due to their ability to have shards. In this application we create 4 shards and the data is distributed amongst these shards. 170 | 171 | These are the shard keys that we use 172 | 173 | - \_id 174 | - Order 175 | - name 176 | - Products 177 | - Suppliers 178 | - Stores 179 | 
We got really good distribution across shards (24-26% per shard) after seeding 4 million records. It's possible to get a hot shard due to this but we're yet to see that. 180 | - productId 181 | - SupplierProducts 182 | - StoreProducts 183 | 
productId is chosen as the shard key since we anticipate that the queries for fetching all suppliers/stores that sell a particular product will be far more frequent than queries fetching all products of a supplier/store. 184 | --------------------------------------------------------------------------------