├── .circleci └── config.yml ├── .dockerignore ├── .editorconfig ├── .env.example ├── .eslintignore ├── .eslintrc.json ├── .gcloudignore ├── .github └── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── .gitignore ├── .prettierignore ├── .prettierrc.js ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── README.md ├── infra ├── README.md ├── k8s-dev │ ├── deployments │ │ └── servicename-depl.yaml │ ├── ingress │ │ └── ingress-srv.yaml │ ├── secrets │ │ ├── google-application-credentials.yaml │ │ ├── proxy-to-another-gcp-project.yaml │ │ └── shared-secrets.yaml │ └── skaffold.yaml ├── k8s │ ├── certificates │ │ └── managed-cert.yaml │ ├── deployments │ │ └── servicename-depl.yaml │ ├── ingress │ │ └── ingress-srv.yaml │ ├── secrets │ │ ├── google-application-credentials.yaml │ │ ├── proxy-to-another-gcp-project.yaml │ │ └── shared-secrets.yaml │ └── skaffold.yaml └── terraform │ ├── .gitignore │ ├── .terraform.lock.hcl │ ├── README.md │ ├── compute.tf │ ├── main.tf │ ├── registry.tf │ ├── storage.tf │ └── variables.tf ├── jest.config.js ├── package-lock.json ├── package.json ├── public ├── index.html └── stylesheets │ └── style.css ├── scripts ├── generate-env.sh └── skaffold-dev.sh ├── src ├── api │ ├── config │ │ └── roles.config.ts │ └── v1 │ │ ├── app │ │ ├── README.md │ │ ├── app.controller.ts │ │ └── app.route.ts │ │ ├── auth │ │ ├── auth.controller.ts │ │ └── auth.route.ts │ │ ├── database-logs │ │ └── databaseLog.model.ts │ │ ├── index.route.ts │ │ ├── swagger │ │ ├── swagger.json │ │ └── swagger.route.ts │ │ ├── typedoc │ │ └── typedoc.route.ts │ │ └── user │ │ └── user.model.ts ├── bin │ └── server.ts ├── config │ ├── gcloud │ │ ├── README.md │ │ ├── google-application-credentials-example.json │ │ └── google-web-client-secret-example.json │ ├── mongodb.config.ts │ ├── mysql.config.ts │ └── passport.config.ts ├── errors │ ├── CustomError.error.ts │ ├── NotAuthorized.error.ts │ ├── NotFound.error.ts │ └── index.ts ├── index.ts ├── jobs │ └── agenda.ts ├── lib │ └── logger.ts ├── middlewares │ ├── apiRateLimit.middleware.ts │ ├── catchAsyncHandler.middleware.ts │ ├── currentUser.middleware.ts │ ├── errorHandler.middleware.ts │ ├── morgan.middleware.ts │ ├── requireAdminRole.middleware.ts │ ├── requireAuthentication.middleware.ts │ └── verifyApiRights.middleware.ts ├── services │ ├── email │ │ └── sparkpost.service.ts │ ├── google-pub-sub │ │ └── pubsub.service.ts │ ├── maps │ │ └── maps.service.ts │ ├── messaging │ │ ├── firebase.service.ts │ │ └── whatsapp.service.ts │ ├── pdf │ │ └── pdf.service.ts │ ├── upload │ │ └── upload.service.ts │ └── xml │ │ └── xml.service.ts ├── tests │ ├── index.test.ts │ └── utils │ │ ├── dates.utils.test.ts │ │ ├── generators.utils.test.ts │ │ └── objects.utils.test.ts ├── types │ └── xss-clean.d.ts └── utils │ ├── createCookieFromToken.utils.ts │ ├── dates.utils.ts │ ├── generators.utils.ts │ └── objects.utils.ts ├── tsconfig.json └── typedoc.json /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | # This config is equivalent to both the '.circleci/extended/orb-free.yml' and the base '.circleci/config.yml' 2 | version: 2.1 3 | 4 | # Orbs are reusable packages of CircleCI configuration that you may share across projects, enabling you to create encapsulated, parameterized commands, jobs, and executors that can be used across multiple projects. 
5 | # See: https://circleci.com/docs/2.0/orb-intro/ 6 | orbs: 7 | node: circleci/node@5.1.0 8 | 9 | # Invoke jobs via workflows 10 | # See: https://circleci.com/docs/2.0/configuration-reference/#workflows 11 | workflows: 12 | sample: # This is the name of the workflow, feel free to change it to better match your workflow. 13 | # Inside the workflow, you define the jobs you want to run. 14 | jobs: 15 | - node/test: 16 | # This is the node version to use for the `cimg/node` tag 17 | # Relevant tags can be found on the CircleCI Developer Hub 18 | # https://circleci.com/developer/images/image/cimg/node 19 | version: '18.14.2' 20 | # If you are using yarn, change the line below from "npm" to "yarn" 21 | pkg-manager: npm 22 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | npm-debug.log 3 | Dockerfile 4 | .git 5 | .gitignore 6 | build 7 | README.md 8 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 2 6 | end_of_line = lf 7 | charset = utf-8 8 | insert_final_newline = true 9 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | HOST=0.0.0.0 2 | PORT=3000 3 | SERVICE_NAME='your_service_name' 4 | 5 | #JWT CONFIGURATION 6 | JWT_KEY='your_secret' 7 | SECRET='my_super_secret' 8 | HASH=10 9 | #JWT_PRIVATE_SECRET='jwt-private-secret' 10 | #JWT_PUBLIC_SECRET='jwt-public-secret' 11 | 12 | #GOOGLE CLOUD CONFIGURATION 13 | #Go to GCP, create a service account and replace all the fields in the json file with yours 14 | GOOGLE_APPLICATION_CREDENTIALS='./src/config/gcloud/google-application-credentials.json' 15 | GOOGLE_PROJECT_ID='your_google_project_id' 16 | GOOGLE_STORAGE_BUCKET_NAME='your_google_storage_bucket_name' 17 | GOOGLE_CLIENT_ID='your_google_client_id' 18 | GOOGLE_CLIENT_SECRET='your_google_client_secret' 19 | GOOGLE_MAPS_API_KEY='your_google_maps_api_key' 20 | 21 | #CLIENT CONFIGURATION 22 | CLIENT_URL='your_client_url_to_authorize' 23 | 24 | #MONGO DB CONFIGURATION 25 | MONGO_URI='your_mongo_db_connection' 26 | MONGO_URI_TEST='your_mongo_db_connection_test' 27 | MONGO_USER='your_mongo_user' 28 | MONGO_PASS='your_mongo_password' 29 | 30 | #MYSQL CONFIGURATION 31 | MYSQL_HOST_STAGE='your_mysql_host_stage' 32 | MYSQL_USER_STAGE='your_mysql_user' 33 | MYSQL_PASSWORD_STAGE='your_mysql_pass' 34 | MYSQL_DB_STAGE='your_mysql_db_name' 35 | MYSQL_SOCKET_STAGE='/your/socket-cloud-sql' 36 | 37 | MYSQL_HOST_PROD='your_mysql_host_prod' 38 | MYSQL_USER_PROD='your_mysql_user' 39 | MYSQL_PASSWORD_PROD='your_mysql_pass' 40 | MYSQL_DB_PROD='your_mysql_db_name' 41 | MYSQL_SOCKET_PROD='/your/socket-cloud-sql' 42 | 43 | 44 | #SPARKPOST CONFIGURATION 45 | SPARKPOST_API_KEY='your_sparkpost_test_api_key' 46 | #SPARKPOST_API_KEY='your_sparkpost_live_api_key' 47 | SPARKPOST_SENDER_DOMAIN='your_sparkpost_sender_domain' 48 | 49 | # MESSAGEBIRD CONFIGURATION 50 | MESSAGEBIRD_ACCESS_KEY='your_messagebird_access_key' #test key 51 | #MESSAGEBIRD_ACCESS_KEY='your_messagebird_access_key' #live key 52 | MESSAGEBIRD_WHATSAPP_CHANNEL_ID='your_messagebird_whatsapp_channel_id' 53 | MESSAGEBIRD_TEMPLATE_NAMESPACE_ID='your_messagebird_template_namespace_id' 54 | 55 | 
#SENDGRID CONFIGURATION 56 | SENDGRID_API_KEY='your_sendgrid_api_key' 57 | SENDGRID_SENDER_EMAIL='your_sendgrid_email_sender' 58 | 59 | #TWILIO CONFIGURATION 60 | TWILIO_ACCOUNT_SID='your_twilio_account_sid' 61 | TWILIO_AUTH_TOKEN='your_twilio_account_token' 62 | TWILIO_PHONE_NUMBER='+your_phone_number' 63 | 64 | 65 | #PUB/SUB TOPICS 66 | TOPIC_NAME='your_pubbus_topic_name' 67 | SUBSCRIPTION_NAME='your_pubsub_subscription_name' 68 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | build/ 2 | node_modules/ 3 | docs/ 4 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/gts/", 3 | "rules": { 4 | "no-process-exit": "off" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /.gcloudignore: -------------------------------------------------------------------------------- 1 | .gcloudignore 2 | .git 3 | .gitignore 4 | node_modules/ 5 | #!include:.gitignore 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional stylelint cache 58 | .stylelintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line in if your project uses Gatsby and not Next.js 97 | # https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # yarn v2 126 | .yarn/cache 127 | .yarn/unplugged 128 | .yarn/build-state.yml 129 | .yarn/install-state.gz 130 | .pnp.* 131 | 132 | 133 | build 134 | docs 135 | docs/ 136 | 137 | 138 | src/config/gcloud/google-web-client-secret.json 139 | src/config/gcloud/google-application-credentials.json 140 | target/ 141 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | build 3 | dist 4 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | ...require('gts/.prettierrc.json') 3 | } 4 | 
-------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to make participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | - Using welcoming and inclusive language 18 | - Being respectful of differing viewpoints and experiences 19 | - Gracefully accepting constructive criticism 20 | - Focusing on what is best for the community 21 | - Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | - The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | - Trolling, insulting/derogatory comments, and personal or political attacks 28 | - Public or private harassment 29 | - Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | - Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies within all project spaces, and it also applies when 49 | an individual is representing the project or its community in public spaces. 50 | Examples of representing a project or community include using an official 51 | project e-mail address, posting via an official social media account, or acting 52 | as an appointed representative at an online or offline event. Representation of 53 | a project may be further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at [INSERT EMAIL ADDRESS]. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 
67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Project 2 | 3 | All contributions are welcome! 4 | 5 | To contribute to this project, please: 6 | * fork the repository to your own account 7 | * clone the repository 8 | * make changes 9 | * submit a pull request against the `development` branch 10 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:18-alpine as base 2 | 3 | WORKDIR /usr/src/app 4 | EXPOSE 3000 5 | 6 | FROM base as builder 7 | COPY ["package.json", "package-lock.json*", "./"] 8 | COPY ./tsconfig.json ./tsconfig.json 9 | COPY ./src ./src 10 | RUN npm ci 11 | RUN npm run compile 12 | RUN npm prune --production 13 | 14 | FROM base as release 15 | ENV NODE_ENV=production 16 | USER node 17 | COPY --chown=node:node --from=builder /usr/src/app/node_modules ./node_modules 18 | COPY --chown=node:node --from=builder /usr/src/app/build ./build 19 | COPY --chown=node:node . /usr/src/app 20 | CMD ["node", "./build/src/bin/server"] 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Giuseppe Albrizio 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![CircleCI](https://dl.circleci.com/status-badge/img/gh/giuseppealbrizio/typescript-rest-api-backend/tree/main.svg?style=svg&circle-token=a73f0879b6f17258a912820c3082a572d49d4ff6)](https://dl.circleci.com/status-badge/redirect/gh/giuseppealbrizio/typescript-rest-api-backend/tree/main) 2 | 3 | [![Code Style: Google](https://img.shields.io/badge/code%20style-google-blueviolet.svg)](https://github.com/google/gts) 4 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 5 | 6 | [![TypeScript](https://img.shields.io/badge/TypeScript-007ACC?style=for-the-badge&logo=typescript&logoColor=white)](https://github.com/Envoy-VC/awesome-badges) 7 | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white)](https://github.com/Envoy-VC/awesome-badges) 8 | 9 | # Typescript REST API Backend Template 10 | 11 | ## Feel free to support this project 12 | 13 | If you found this project helpful, please consider supporting me by buying me a coffee! Your support will help me to keep creating more useful content and improving this project. 14 | 15 | [!["Buy Me A Coffee"](https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png)](https://www.buymeacoffee.com/galbrizio) 16 | 17 | --- 18 | 19 | #### Typescript REST microservice boilerplate using Node.js, Express and some other cool stuff 20 | 21 | This template is intended to be used as a single service in a REST multi-service application using Cloud Pub/Sub as the 22 | message broker. 23 | 24 | Use it locally with Skaffold and in the cloud with GKE. 25 | 26 | To learn more about how to set up GKE and run with Skaffold, please refer to this folder: 27 | 28 | `./infra` 29 | 30 | The application uses Express as its framework and is configured with the following features: 31 | 32 | - `ECMA2022` features enabled 33 | - `Dotenv` Load environment variables from a .env file 34 | - `Eslint` Code quality tool 35 | - `Prettier` to prettify the code 36 | - `MongoDB` ready-to-go configuration with mongoose 37 | - `MySQL` ready-to-go configuration with mysql2 38 | - `CORS` feature enabled 39 | - `RBAC` logic to authorize people with specific roles to use the endpoints. 40 | - `Passport` logic to add an authentication layer if needed. 41 | - `Sparkpost` email service support with SparkPost. 
42 | - `Error Handling` custom error middleware and helpers globally configured 43 | - `Multer` file uploading configured to be used in routes as middleware 44 | - `Google Cloud Storage` middleware configured to use Google Cloud Storage as upload bucket 45 | - `Google Cloud Pub/Sub` pub/sub support for event-driven communication 46 | - `Axios` globally configured in `./src/utils/api.utils.js` 47 | - `Swagger` documentation reachable at `http://localhost:3000/api/v1/docs` 48 | - `Jest` testing tool support 49 | - `Logger` logging support with Winston 50 | - `Docker` ready configuration with multi-stage option 51 | - `Terraform` ready configuration to provision infrastructure in GCP 52 | - `Agenda` ready to emit events through agenda jobs 53 | - `Best practices` in naming files 54 | 55 | ## Basic Information 56 | 57 | - App entry point is located in `./src/index.ts` 58 | 59 | - Server config entrypoint is located in `./src/bin/server.ts` 60 | 61 | - Prettier config is located at `./.prettierrc.js` 62 | 63 | - Eslint config is located at `./.eslintrc.json` 64 | 65 | - Sparkpost service support is located at `./src/services/email/sparkpost.service.ts` 66 | 67 | - You can define your own email services in this file 68 | 69 | - Mongo config is located at `./src/config/mongodb.config.ts` 70 | 71 | - MySQL config is located at `./src/config/mysql.config.ts` 72 | 73 | - Error Handling middleware is located at `./src/middlewares/errorHandler.middleware.ts` (see the illustrative example at the end of this README) 74 | 75 | - You can configure as many errors as you need in `./src/errors/` 76 | 77 | - Multer middleware is located at `./src/middlewares/upload.middleware.ts` 78 | 79 | - If you want to use Google Cloud Storage as upload bucket follow the instructions at `./src/config/gcloud/README.md` 80 | 81 | - RBAC logic middleware is located at `./src/middlewares/verifyApiRights.middleware.ts` 82 | 83 | - Swagger config file is located at `./src/api/v1/swagger/swagger.json` 84 | 85 | - Swagger routes are defined in `./src/api/v1/swagger/swagger.route.ts` 86 | 87 | - Docker config is located at `./Dockerfile` 88 | 89 | - Pub/Sub service is located at `./src/services/google-pub-sub/pubsub.service.ts` 90 | 91 | ## Folder Structure 92 | 93 | > `infra/` 94 | > 95 | > - **For more information about the k8s configuration please check the README file** 96 | > - **`k8s`** - folder contains all production kubernetes manifests 97 | > - **`k8s-dev`** - folder contains all development kubernetes manifests to run with skaffold 98 | > - **`scripts`** - folder contains all scripts related to creating a cluster, running skaffold, or creating 99 | > secrets 100 | > 101 | > `src/` 102 | > 103 | > - **`api/`** - contains all API logic with models, services, controllers and routes 104 | > - **`bin/`** - server configuration folder 105 | > - **`config/`** - this folder contains all the config files (database, passport, etc...) 106 | > - **`constants/`** - this folder contains all the global constants 107 | > - **`logs/`** - the logger file will be stored here 108 | > - **`helpers/`** - some helper functions, e.g. an error helper that returns JSON every time an error comes in 109 | > - **`middlewares/`** - here you can find all the custom middlewares 110 | > - **`services/`** - here we store all the services; i.e. here we define methods to manipulate a db model entity 111 | > - **`tests/`** - here we store all the Jest tests 112 | > - **`utils/`** - contains some utility functions to be reused in the code (e.g. 
axios global configuration) 113 | 114 | ## Getting Started 115 | 116 | Copy the .env.example to .env and be sure to fill in all the global variables. Alternatively you can use the script `generate-env.sh` in the scripts folder. This script will generate a `.env.test.local` and you can copy this file to `.env`. 117 | 118 | ```bash 119 | cp .env.example .env 120 | ``` 121 | 122 | Then replace: 123 | 124 | 1. `MONGO_URI` string with your Mongo connection 125 | 2. `MONGO_URI_TEST` string with your Mongo Test connection 126 | 3. `MYSQL_HOST_STAGE` string with your MySQL host name 127 | - `MYSQL_USER_STAGE` string with your MySQL username 128 | - `MYSQL_PASSWORD_STAGE` string with your MySQL password 129 | - `MYSQL_DB_STAGE` string with your MySQL db name 130 | - `MYSQL_SOCKET_STAGE` string with your MySQL socket name 131 | 4. `GOOGLE_APPLICATION_CREDENTIALS` path with yours 132 | 5. `GOOGLE_PROJECT_ID` with yours 133 | 6. `SENDGRID_API_KEY` with yours 134 | 7. `SENDGRID_SENDER_EMAIL` with yours 135 | 136 | For Google Cloud Storage to work, follow the instructions located in `./src/config/gcloud/README.md` 137 | 138 | --- 139 | 140 | To get started with this repo, run npm install in the root folder 141 | 142 | ```bash 143 | npm install 144 | ``` 145 | 146 | To get started with a dev environment, run the command below. Here we use nodemon and babel-node to restart the server as soon as we change 147 | something 148 | 149 | ```bash 150 | npm run start:dev 151 | ``` 152 | 153 | To compile the code and create a production build 154 | 155 | ```bash 156 | npm run compile 157 | ``` 158 | 159 | This command will create a build in the root directory 160 | 161 | To start with a production-ready build you can run this command 162 | 163 | ```bash 164 | # This sets NODE_ENV to production, uses npm-run-all to create a build and runs the server command 165 | npm run start 166 | ``` 167 | 168 | If you already have a build and want to run it with node, you can run 169 | 170 | ```bash 171 | # This command launches the node instance inside ./build/bin/server 172 | npm run server 173 | ``` 174 | 175 | ## Docker Ready 176 | 177 | ### Here we use a multi-stage build to optimize the speed and size of the final image 178 | 179 | If you use Docker and want to dockerize the app, you can run the command 180 | 181 | ```bash 182 | docker build -t <your-registry>/<image-name>:<tag> . 183 | ``` 184 | 185 | then 186 | 187 | ```bash 188 | docker run --name <container-name> -d -p 3000:3000 <your-registry>/<image-name>:<tag> 189 | ``` 190 | 
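## Illustrative example: async handler + custom error

To make the "Basic Information" section above a bit more concrete, here is a minimal, self-contained sketch of how the async-wrapper middleware and a custom error are meant to cooperate with the central error handler. Everything in it (`catchAsync`, `NotFoundError`, the `/api/v1/items/:id` route and the in-memory `items` map) is illustrative only and is not the template's actual API; see `./src/middlewares/catchAsyncHandler.middleware.ts`, `./src/errors/` and `./src/middlewares/errorHandler.middleware.ts` for the real implementations.

```typescript
import express, {NextFunction, Request, Response, RequestHandler} from 'express';

// Hypothetical stand-in for the custom errors defined in ./src/errors
class NotFoundError extends Error {
  statusCode = 404;
}

// Wraps an async handler so that rejected promises are forwarded to the error middleware
const catchAsync =
  (fn: (req: Request, res: Response, next: NextFunction) => Promise<unknown>): RequestHandler =>
  (req, res, next) => {
    fn(req, res, next).catch(next);
  };

const app = express();

// Pretend data source, just for the sketch
const items = new Map([['1', {id: '1', name: 'demo'}]]);

app.get(
  '/api/v1/items/:id',
  catchAsync(async (req, res) => {
    const item = items.get(req.params.id);
    if (!item) {
      // Throwing a typed error lets the central handler decide the response shape
      throw new NotFoundError(`Item ${req.params.id} not found`);
    }
    res.status(200).json({item});
  })
);

// Central error handler, in the spirit of errorHandler.middleware.ts
app.use((err: Error, _req: Request, res: Response, _next: NextFunction) => {
  const status = err instanceof NotFoundError ? err.statusCode : 500;
  res.status(status).json({error: err.message});
});

app.listen(3000);
```

The same idea extends to the authentication and RBAC middlewares: they are meant to run before the wrapped handler and can throw their own custom errors, which land in the same central handler.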
-------------------------------------------------------------------------------- /infra/README.md: -------------------------------------------------------------------------------- 1 | # INFRASTRUCTURE FOLDER 2 | 3 | This folder should be moved to the root folder where all the services are located. 4 | 5 | Replace all the key-value pairs with yours 6 | 7 | - `k8s` folder contains all production kubernetes manifests 8 | - `k8s-dev` folder contains all development kubernetes manifests to run with skaffold 9 | - `scripts` folder contains all scripts related to creating a cluster, running skaffold, or creating secrets 10 | 11 | ## Skaffold File 12 | 13 | For production environment: `./k8s/skaffold.yaml` 14 | 15 | For development environment: `./k8s-dev/skaffold.yaml` 16 | 17 | Remember to put this file in the root of the multi-service project. Depending on the environment, you should specify the 18 | correct skaffold configuration. 19 | 20 | - If you use Docker, you should install NGINX by following this link 21 | [NGINX x Docker](https://kubernetes.github.io/ingress-nginx/deploy/) 22 | 23 | ## TASKS TO MAKE THIS WORK 24 | 25 | 1. Create a project in GCP 26 | 2. Go to `./scripts/gke-autopilot.sh` and change the `<project-id>` with your project id. 27 | 3. Launch the script with `chmod +x gke-autopilot.sh && ./gke-autopilot.sh` 28 | 4. Just in case the context is not changed, you should change it with `kubectl config use-context <context-name>` 29 | 5. Put the file `skaffold.yaml` in your root folder where all the services are located. 30 | 6. For each YAML file change the `project-id`, `servicename` and all other env variables with yours 31 | 7. After you have changed all the configuration files you can launch skaffold with `skaffold run` 32 | 33 | ## USEFUL COMMANDS 34 | 35 | - Change the context of kubernetes 36 | 37 | ```bash 38 | kubectl config use-context <context-name> 39 | ``` 40 | 41 | - Build the container in gcloud with the following command, from the root where the Dockerfile is located 42 | 43 | ```bash 44 | gcloud builds submit --tag gcr.io/<project-id>/<image-name> . 45 | ``` 46 | 47 | - CREATE SECRET FROM JSON FILE 48 | - google-application-credentials = the name of the secret to be stored 49 | - google-application-credentials.json = the file name under which the file will be stored in a volume 50 | - ./google-application-credentials.json = the actual file downloaded, located in the config folder 51 | 52 | ```bash 53 | kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json 54 | ``` 55 | -------------------------------------------------------------------------------- /infra/k8s-dev/deployments/servicename-depl.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: servicename-depl 5 | namespace: default 6 | labels: 7 | app: servicename 8 | spec: 9 | replicas: 1 10 | selector: 11 | matchLabels: 12 | app: servicename 13 | template: 14 | metadata: 15 | labels: 16 | app: servicename 17 | spec: 18 | volumes: 19 | - name: google-cloud-keys 20 | secret: 21 | secretName: google-application-credentials 22 | 23 | - name: proxy-to-another-gcp-project # name of the volume that contains the proxy to another gcp project 24 | secret: 25 | secretName: proxy-to-another-gcp-project-secret 26 | containers: 27 | - name: servicename 28 | #Local Configuration 29 | image: org_name/project_name/servicename:latest 30 | volumeMounts: 31 | - name: google-cloud-keys 32 | mountPath: /var/secrets/google 33 | env: 34 | #SERVICE CONFIGURATION 35 | - name: HOST 36 | value: '0.0.0.0' 37 | - name: SERVICE_NAME 38 | value: 'your-service-name' 39 | - name: PORT 40 | value: '3000' 41 | - name: HASH 42 | value: '10' 43 | #JWT CONFIGURATION 44 | - name: JWT_KEY 45 | valueFrom: 46 | secretKeyRef: 47 | name: shared-secrets 48 | key: JWT_KEY 49 | - name: SECRET 50 | valueFrom: 51 | secretKeyRef: 52 | name: shared-secrets 53 | key: SECRET 54 | #MONGO CONFIGURATION 55 | - name: MONGO_URI 56 | valueFrom: 57 | secretKeyRef: 58 | name: shared-secrets 59 | key: MONGO_URI_TEST # We use the test one also in mongouri. 
this happen cause when launch skaffold in local it has node_env production 60 | - name: MONGO_URI_TEST 61 | valueFrom: 62 | secretKeyRef: 63 | name: shared-secrets 64 | key: MONGO_URI_TEST 65 | #GOOGLE CLOUD CONFIGURATION 66 | - name: GOOGLE_APPLICATION_CREDENTIALS 67 | value: '/var/secrets/google/google-application-credentials.json' 68 | - name: GOOGLE_PROJECT_ID 69 | valueFrom: 70 | secretKeyRef: 71 | name: shared-secrets 72 | key: GOOGLE_PROJECT_ID 73 | - name: GOOGLE_CLOUD_PROJECT 74 | valueFrom: 75 | secretKeyRef: 76 | name: shared-secrets 77 | key: GOOGLE_CLOUD_PROJECT 78 | - name: GOOGLE_STORAGE_BUCKET_NAME 79 | valueFrom: 80 | secretKeyRef: 81 | name: shared-secrets 82 | key: GOOGLE_STORAGE_BUCKET_NAME 83 | - name: GOOGLE_CLIENT_ID 84 | valueFrom: 85 | secretKeyRef: 86 | name: shared-secrets 87 | key: GOOGLE_CLIENT_ID 88 | - name: GOOGLE_CLIENT_SECRET 89 | valueFrom: 90 | secretKeyRef: 91 | name: shared-secrets 92 | key: GOOGLE_CLIENT_SECRET 93 | - name: GOOGLE_MAPS_API_KEY 94 | valueFrom: 95 | secretKeyRef: 96 | name: shared-secrets 97 | key: GOOGLE_MAPS_API_KEY 98 | #SPARKPOST CONFIGURATION 99 | - name: SPARKPOST_API_KEY 100 | valueFrom: 101 | secretKeyRef: 102 | name: shared-secrets 103 | key: SPARKPOST_API_KEY 104 | - name: SPARKPOST_SENDER_DOMAIN 105 | valueFrom: 106 | secretKeyRef: 107 | name: shared-secrets 108 | key: SPARKPOST_SENDER_DOMAIN 109 | #MESSAGEBIRD CONFIGURATION 110 | - name: MESSAGEBIRD_ACCESS_KEY 111 | valueFrom: 112 | secretKeyRef: 113 | name: shared-secrets 114 | key: MESSAGEBIRD_ACCESS_KEY 115 | - name: MESSAGEBIRD_WHATSAPP_CHANNEL_ID 116 | valueFrom: 117 | secretKeyRef: 118 | name: shared-secrets 119 | key: MESSAGEBIRD_WHATSAPP_CHANNEL_ID 120 | - name: MESSAGEBIRD_TEMPLATE_NAMESPACE_ID 121 | valueFrom: 122 | secretKeyRef: 123 | name: shared-secrets 124 | key: MESSAGEBIRD_TEMPLATE_NAMESPACE_ID 125 | - name: MESSAGEBIRD_TEMPLATE_NAME_TEST 126 | valueFrom: 127 | secretKeyRef: 128 | name: shared-secrets 129 | key: MESSAGEBIRD_TEMPLATE_NAME_TEST 130 | #MYSQL CONFIGURATION 131 | - name: MYSQL_HOST_STAGE 132 | valueFrom: 133 | secretKeyRef: 134 | name: shared-secrets 135 | key: MYSQL_HOST_STAGE 136 | - name: MYSQL_USER_STAGE 137 | valueFrom: 138 | secretKeyRef: 139 | name: shared-secrets 140 | key: MYSQL_USER_STAGE 141 | - name: MYSQL_PASSWORD_STAGE 142 | valueFrom: 143 | secretKeyRef: 144 | name: shared-secrets 145 | key: MYSQL_PASSWORD_STAGE 146 | - name: MYSQL_DB_STAGE 147 | valueFrom: 148 | secretKeyRef: 149 | name: shared-secrets 150 | key: MYSQL_DB_STAGE 151 | - name: MYSQL_SOCKET_STAGE 152 | value: '/cloudsql/your-socket-name' 153 | 154 | - name: MYSQL_HOST_PROD 155 | value: '127.0.0.1' #we use localhost because we mounted a cloud proxy sql 156 | - name: MYSQL_USER_PROD 157 | valueFrom: 158 | secretKeyRef: 159 | name: shared-secrets 160 | key: MYSQL_USER_PROD 161 | - name: MYSQL_PASSWORD_PROD 162 | valueFrom: 163 | secretKeyRef: 164 | name: shared-secrets 165 | key: MYSQL_PASSWORD_PROD 166 | - name: MYSQL_DB_PROD 167 | valueFrom: 168 | secretKeyRef: 169 | name: shared-secrets 170 | key: MYSQL_DB_PROD 171 | - name: MYSQL_SOCKET_PROD 172 | value: '/cloudsql/your-cloudsql-socket' 173 | 174 | - name: cloud-sql-proxy 175 | # It is recommended to use the latest version of the Cloud SQL proxy 176 | # Make sure to update on a regular schedule! 
177 | image: gcr.io/cloud-sql-connectors/cloud-sql-proxy:2.1.0 178 | args: 179 | # If connecting from a VPC-native GKE cluster, you can use the 180 | # following flag to have the proxy connect over private IP 181 | # - "--private-ip" 182 | 183 | # Enable structured logging with LogEntry format: 184 | - "--structured-logs" 185 | 186 | # Defaults: MySQL: 3306, Postgres: 5432, SQLServer: 1433 187 | # Replace DB_PORT with the port the proxy should listen on 188 | - "--port=3306" 189 | - "cloud-sql-instances=instance-name" 190 | 191 | # [START cloud_sql_proxy_k8s_volume_mount] 192 | # This flag specifies where the service account key can be found 193 | - '--credentials-file=/var/secrets/google/proxy-to-another-gcp-project.json' 194 | securityContext: 195 | # The default Cloud SQL proxy image runs as the 196 | # "nonroot" user and group (uid: 65532) by default. 197 | runAsNonRoot: true 198 | volumeMounts: 199 | - name: proxy-to-another-gcp-project 200 | mountPath: /var/secrets/google 201 | readOnly: true 202 | # [END cloud_sql_proxy_k8s_volume_mount] 203 | # Resource configuration depends on an application's requirements. You 204 | # should adjust the following values based on what your application 205 | # needs. For details, see https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ 206 | resources: 207 | requests: 208 | # The proxy's memory use scales linearly with the number of active 209 | # connections. Fewer open connections will use less memory. Adjust 210 | # this value based on your application's requirements. 211 | memory: '2Gi' 212 | # The proxy's CPU use scales linearly with the amount of IO between 213 | # the database and the application. Adjust this value based on your 214 | # application's requirements. 215 | cpu: '1' 216 | --- 217 | apiVersion: v1 218 | kind: Service 219 | metadata: 220 | name: servicename-srv 221 | spec: 222 | type: ClusterIP 223 | selector: 224 | app: servicename 225 | ports: 226 | - name: servicename 227 | protocol: TCP 228 | port: 3000 229 | targetPort: 3000 230 | -------------------------------------------------------------------------------- /infra/k8s-dev/ingress/ingress-srv.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: networking.k8s.io/v1 2 | kind: Ingress 3 | metadata: 4 | name: ingress-service 5 | annotations: 6 | #Local configuration - Remember to install nginx 7 | kubernetes.io/ingress.class: nginx 8 | nginx.ingress.kubernetes.io/use-regex: 'true' 9 | nginx.ingress.kubernetes.io/enable-cors: 'true' 10 | nginx.ingress.kubernetes.io/cors-allow-methods: 'GET, HEAD, PUT, PATCH, POST, DELETE, OPTIONS' 11 | nginx.ingress.kubernetes.io/cors-allow-origin: 'http://localhost:3000' 12 | nginx.ingress.kubernetes.io/cors-allow-credentials: 'true' 13 | nginx.ingress.kubernetes.io/proxy-body-size: 8m 14 | spec: 15 | rules: 16 | - host: testrestapi.eu.ngrok.io 17 | http: 18 | paths: 19 | # Client implementation of React or a frontend client in general that doesn't have api versioning 20 | - path: /?(.*) 21 | pathType: ImplementationSpecific 22 | backend: 23 | service: 24 | name: clientservicename-srv 25 | port: 26 | number: 3000 27 | 28 | - path: /api/v1/service-1-name/?(.*) 29 | pathType: ImplementationSpecific 30 | backend: 31 | service: 32 | name: servicename-srv 33 | port: 34 | number: 3000 35 | 36 | - path: /api/v1/service-2-name/?(.*) 37 | pathType: ImplementationSpecific 38 | backend: 39 | service: 40 | name: servicename2-srv 41 | port: 42 | number: 3000 43 | 
-------------------------------------------------------------------------------- /infra/k8s-dev/secrets/google-application-credentials.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: google-application-credentials #name of the secret to be mounted 5 | type: Opaque 6 | stringData: #file name that will be created to mount 7 | google-application-credentials.json: | 8 | { 9 | "type": "service_account", 10 | "project_id": "your-project-id", 11 | "private_key_id": "your-private-key-id", 12 | "private_key": "your-private-key", 13 | "client_email": "service-account-email", 14 | "client_id": "your-client-id", 15 | "auth_uri": "https://accounts.google.com/o/oauth2/auth", 16 | "token_uri": "https://oauth2.googleapis.com/token", 17 | "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", 18 | "client_x509_cert_url": "client_x509_cert_url" 19 | } 20 | 21 | #The same result can be achieved by using this kubectl command in the folder where google-application-credentials.json is 22 | #kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json 23 | -------------------------------------------------------------------------------- /infra/k8s-dev/secrets/proxy-to-another-gcp-project.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: proxy-to-another-gcp-project-secret #name of the secret to be mounted 5 | type: Opaque 6 | stringData: #file name that will be created to mount 7 | proxy-to-another-gcp-project.json: | 8 | { 9 | "type": "service_account", 10 | "project_id": "your-project-id", 11 | "private_key_id": "your-private-key-id", 12 | "private_key": "your-private-key", 13 | "client_email": "service-account-email", 14 | "client_id": "your-client-id", 15 | "auth_uri": "https://accounts.google.com/o/oauth2/auth", 16 | "token_uri": "https://oauth2.googleapis.com/token", 17 | "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", 18 | "client_x509_cert_url": "client_x509_cert_url" 19 | } 20 | 21 | #The same result can be achieved by using this kubectl command in the folder where google-application-credentials.json is 22 | #kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json 23 | -------------------------------------------------------------------------------- /infra/k8s-dev/secrets/shared-secrets.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: shared-secrets 5 | data: 6 | #JWT CONFIGURATION 7 | JWT_KEY: 8 | SECRET: 9 | 10 | #MONGODB CONFIGURATION: 11 | MONGO_URI: 12 | MONGO_URI_TEST: 13 | 14 | #GOOGLE CLOUD CONFIGURATION 15 | GOOGLE_PROJECT_ID: 16 | GOOGLE_CLOUD_PROJECT: 17 | GOOGLE_STORAGE_BUCKET_NAME: 18 | GOOGLE_CLIENT_ID: 19 | GOOGLE_CLIENT_SECRET: 20 | GOOGLE_MAPS_API_KEY: 21 | 22 | #SPARKPOST CONFIGURATION 23 | SPARKPOST_API_KEY: #Use test key here 24 | SPARKPOST_SENDER_DOMAIN: 25 | 26 | # MESSAGEBIRD CONFIGURATION 27 | MESSAGEBIRD_ACCESS_KEY: #Use test key here 28 | MESSAGEBIRD_WHATSAPP_CHANNEL_ID: 29 | MESSAGEBIRD_TEMPLATE_NAMESPACE_ID: 30 | MESSAGEBIRD_TEMPLATE_NAME_TEST: 31 | 32 | #MYSQL CONFIGURATION SECRECTS 33 | MYSQL_HOST_STAGE: 34 | MYSQL_USER_STAGE: 35 | MYSQL_PASSWORD_STAGE: 36 | 
MYSQL_DB_STAGE: 37 | MYSQL_SOCKET_STAGE: #not necessary 38 | 39 | MYSQL_HOST_PROD: 40 | MYSQL_USER_PROD: 41 | MYSQL_PASSWORD_PROD: 42 | MYSQL_DB_PROD: 43 | MYSQL_SOCKET_PROD: #not necessary 44 | 45 | 46 | #kubectl create secret generic jwt-secret --from-literal=JWT_KEY=JWT_SECRET 47 | 48 | #Don't forget to create the google-application-credentials secret with 49 | #kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json 50 | -------------------------------------------------------------------------------- /infra/k8s-dev/skaffold.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: skaffold/v4beta1 2 | kind: Config 3 | metadata: 4 | name: project-id #project id 5 | build: 6 | artifacts: 7 | #Local configuration 8 | # Client context of React 9 | - image: org_name/project_name/client-servicename 10 | context: client-service-folder 11 | sync: 12 | manual: 13 | - src: ./src/**/*.ts 14 | dest: . 15 | - src: "***/*.html" 16 | dest: . 17 | - src: "***/*.css" 18 | dest: . 19 | docker: 20 | dockerfile: Dockerfile 21 | # Service 1 context 22 | - image: org_name/project_name/servicename 23 | context: service-folder #folder where codebase is stored 24 | sync: 25 | manual: 26 | - src: src/**/*.ts 27 | dest: . 28 | docker: 29 | dockerfile: Dockerfile 30 | - image: org_name/project_name/servicename2 31 | context: service2-folder 32 | sync: 33 | manual: 34 | - src: src/**/*.ts 35 | dest: . 36 | docker: 37 | dockerfile: Dockerfile 38 | tagPolicy: 39 | sha256: {} #this tag policy uses the tag latest of image 40 | #Local configuration 41 | local: 42 | push: false 43 | manifests: 44 | rawYaml: 45 | - ./infra/k8s-dev/* 46 | deploy: 47 | kubectl: {} 48 | -------------------------------------------------------------------------------- /infra/k8s/certificates/managed-cert.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: networking.gke.io/v1 2 | kind: ManagedCertificate 3 | metadata: 4 | name: project-id-certificate 5 | spec: 6 | domains: 7 | - domainname.com 8 | - api.domainname.com 9 | -------------------------------------------------------------------------------- /infra/k8s/deployments/servicename-depl.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: servicename-depl 5 | namespace: default 6 | labels: 7 | app: servicename 8 | spec: 9 | replicas: 1 10 | selector: 11 | matchLabels: 12 | app: servicename 13 | template: 14 | metadata: 15 | labels: 16 | app: servicename 17 | spec: 18 | volumes: 19 | - name: google-cloud-keys 20 | secret: 21 | secretName: google-application-credentials 22 | 23 | - name: proxy-to-another-gcp-project # name of the volumes that contain the proxy to another gcp project 24 | secret: 25 | secretName: proxy-to-another-gcp-project-secret 26 | containers: 27 | - name: servicename 28 | #Cloud Configuration 29 | image: europe-west1-docker.pkg.dev/your-artifact-repository/servicename:latest 30 | imagePullPolicy: Always 31 | # Liveness Probe Configuration 32 | livenessProbe: 33 | failureThreshold: 3 34 | httpGet: 35 | path: /api/v1/servicename/ 36 | port: 3000 37 | scheme: HTTP 38 | initialDelaySeconds: 60 39 | periodSeconds: 60 40 | successThreshold: 1 41 | timeoutSeconds: 10 42 | # Readiness Probe Configuration 43 | readinessProbe: 44 | failureThreshold: 3 45 | httpGet: 46 | path: 
/api/v1/servicename/ 47 | port: 3000 48 | scheme: HTTP 49 | initialDelaySeconds: 60 50 | periodSeconds: 60 51 | successThreshold: 1 52 | timeoutSeconds: 10 53 | 54 | volumeMounts: 55 | - name: google-cloud-keys 56 | mountPath: /var/secrets/google 57 | ports: 58 | - containerPort: 3000 59 | env: 60 | #SERVICE CONFIGURATION 61 | - name: HOST 62 | value: '0.0.0.0' 63 | - name: SERVICE_NAME 64 | value: 'your_service_name' 65 | - name: PORT 66 | value: '3000' 67 | - name: HASH 68 | value: '10' 69 | 70 | #JWT CONFIGURATION 71 | - name: JWT_KEY 72 | valueFrom: 73 | secretKeyRef: 74 | name: shared-secrets 75 | key: JWT_KEY 76 | - name: SECRET 77 | valueFrom: 78 | secretKeyRef: 79 | name: shared-secrets 80 | key: SECRET 81 | 82 | #MONGO CONFIGURATION 83 | - name: MONGO_URI 84 | valueFrom: 85 | secretKeyRef: 86 | name: shared-secrets 87 | key: MONGO_URI # We use the test one also in mongouri. this happen cause when launch skaffold in local it has node_env production 88 | - name: MONGO_URI_TEST 89 | valueFrom: 90 | secretKeyRef: 91 | name: shared-secrets 92 | key: MONGO_URI_TEST 93 | 94 | #GOOGLE CLOUD CONFIGURATION 95 | - name: GOOGLE_APPLICATION_CREDENTIALS 96 | value: '/var/secrets/google/google-application-credentials.json' 97 | - name: GOOGLE_PROJECT_ID 98 | valueFrom: 99 | secretKeyRef: 100 | name: shared-secrets 101 | key: GOOGLE_PROJECT_ID 102 | - name: GOOGLE_CLOUD_PROJECT 103 | valueFrom: 104 | secretKeyRef: 105 | name: shared-secrets 106 | key: GOOGLE_CLOUD_PROJECT 107 | - name: GOOGLE_STORAGE_BUCKET_NAME 108 | valueFrom: 109 | secretKeyRef: 110 | name: shared-secrets 111 | key: GOOGLE_STORAGE_BUCKET_NAME 112 | - name: GOOGLE_CLIENT_ID 113 | valueFrom: 114 | secretKeyRef: 115 | name: shared-secrets 116 | key: GOOGLE_CLIENT_ID 117 | - name: GOOGLE_CLIENT_SECRET 118 | valueFrom: 119 | secretKeyRef: 120 | name: shared-secrets 121 | key: GOOGLE_CLIENT_SECRET 122 | - name: GOOGLE_MAPS_API_KEY 123 | valueFrom: 124 | secretKeyRef: 125 | name: shared-secrets 126 | key: GOOGLE_MAPS_API_KEY 127 | 128 | #SPARKPOST CONFIGURATION 129 | - name: SPARKPOST_API_KEY 130 | valueFrom: 131 | secretKeyRef: 132 | name: shared-secrets 133 | key: SPARKPOST_API_KEY 134 | - name: SPARKPOST_SENDER_DOMAIN 135 | valueFrom: 136 | secretKeyRef: 137 | name: shared-secrets 138 | key: SPARKPOST_SENDER_DOMAIN 139 | 140 | #MESSAGEBIRD CONFIGURATION 141 | - name: MESSAGEBIRD_ACCESS_KEY 142 | valueFrom: 143 | secretKeyRef: 144 | name: shared-secrets 145 | key: MESSAGEBIRD_ACCESS_KEY 146 | - name: MESSAGEBIRD_WHATSAPP_CHANNEL_ID 147 | valueFrom: 148 | secretKeyRef: 149 | name: shared-secrets 150 | key: MESSAGEBIRD_WHATSAPP_CHANNEL_ID 151 | - name: MESSAGEBIRD_TEMPLATE_NAMESPACE_ID 152 | valueFrom: 153 | secretKeyRef: 154 | name: shared-secrets 155 | key: MESSAGEBIRD_TEMPLATE_NAMESPACE_ID 156 | - name: MESSAGEBIRD_TEMPLATE_NAME_TEST 157 | valueFrom: 158 | secretKeyRef: 159 | name: shared-secrets 160 | key: MESSAGEBIRD_TEMPLATE_NAME_TEST 161 | 162 | #MYSQL CONFIGURATION 163 | - name: MYSQL_HOST_STAGE 164 | valueFrom: 165 | secretKeyRef: 166 | name: shared-secrets 167 | key: MYSQL_HOST_STAGE 168 | - name: MYSQL_USER_STAGE 169 | valueFrom: 170 | secretKeyRef: 171 | name: shared-secrets 172 | key: MYSQL_USER_STAGE 173 | - name: MYSQL_PASSWORD_STAGE 174 | valueFrom: 175 | secretKeyRef: 176 | name: shared-secrets 177 | key: MYSQL_PASSWORD_STAGE 178 | - name: MYSQL_DB_STAGE 179 | valueFrom: 180 | secretKeyRef: 181 | name: shared-secrets 182 | key: MYSQL_DB_STAGE 183 | - name: MYSQL_SOCKET_STAGE 184 | value: '/cloudsql/your-socket-name' 
185 | 186 | - name: MYSQL_HOST_PROD 187 | value: '127.0.0.1' #we use localhost because we mounted a cloud proxy sql 188 | - name: MYSQL_USER_PROD 189 | valueFrom: 190 | secretKeyRef: 191 | name: shared-secrets 192 | key: MYSQL_USER_PROD 193 | - name: MYSQL_PASSWORD_PROD 194 | valueFrom: 195 | secretKeyRef: 196 | name: shared-secrets 197 | key: MYSQL_PASSWORD_PROD 198 | - name: MYSQL_DB_PROD 199 | valueFrom: 200 | secretKeyRef: 201 | name: shared-secrets 202 | key: MYSQL_DB_PROD 203 | - name: MYSQL_SOCKET_PROD 204 | value: '/cloudsql/your-socket-name' 205 | 206 | - name: cloud-sql-proxy 207 | # It is recommended to use the latest version of the Cloud SQL proxy 208 | # Make sure to update on a regular schedule! 209 | image: gcr.io/cloud-sql-connectors/cloud-sql-proxy:2.1.0 210 | args: 211 | # If connecting from a VPC-native GKE cluster, you can use the 212 | # following flag to have the proxy connect over private IP 213 | # - "--private-ip" 214 | 215 | # Enable structured logging with LogEntry format: 216 | - '--structured-logs' 217 | 218 | # Defaults: MySQL: 3306, Postgres: 5432, SQLServer: 1433 219 | # Replace DB_PORT with the port the proxy should listen on 220 | - '--port=3306' 221 | - 'cloud-sql-instances=instance-name' 222 | 223 | # [START cloud_sql_proxy_k8s_volume_mount] 224 | # This flag specifies where the service account key can be found 225 | - '--credentials-file=/var/secrets/google/proxy-to-another-gcp-project.json' 226 | securityContext: 227 | # The default Cloud SQL proxy image runs as the 228 | # "nonroot" user and group (uid: 65532) by default. 229 | runAsNonRoot: true 230 | volumeMounts: 231 | - name: proxy-to-another-gcp-project 232 | mountPath: /var/secrets/google 233 | readOnly: true 234 | # [END cloud_sql_proxy_k8s_volume_mount] 235 | # Resource configuration depends on an application's requirements. You 236 | # should adjust the following values based on what your application 237 | # needs. For details, see https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ 238 | resources: 239 | requests: 240 | # The proxy's memory use scales linearly with the number of active 241 | # connections. Fewer open connections will use less memory. Adjust 242 | # this value based on your application's requirements. 243 | memory: '2Gi' 244 | # The proxy's CPU use scales linearly with the amount of IO between 245 | # the database and the application. Adjust this value based on your 246 | # application's requirements. 
247 | cpu: '1' 248 | --- 249 | apiVersion: v1 250 | kind: Service 251 | metadata: 252 | name: servicename-srv 253 | spec: 254 | # type: ClusterIP 255 | type: NodePort 256 | selector: 257 | app: servicename 258 | ports: 259 | - name: servicename 260 | protocol: TCP 261 | port: 3000 262 | targetPort: 3000 263 | -------------------------------------------------------------------------------- /infra/k8s/ingress/ingress-srv.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: networking.k8s.io/v1 2 | kind: Ingress 3 | metadata: 4 | name: ingress-service 5 | annotations: 6 | #Cloud Configuration 7 | kubernetes.io/ingress.class: gce 8 | kubernetes.io/ingress.global-static-ip-name: project-id-static-ip 9 | networking.gke.io/managed-certificates: project-id-certificate 10 | spec: 11 | rules: 12 | - host: domainname.com 13 | http: 14 | paths: 15 | - path: /* 16 | pathType: ImplementationSpecific 17 | backend: 18 | service: 19 | name: clientservicename-srv 20 | port: 21 | number: 3000 22 | 23 | - path: /api/v1/servicename/* 24 | pathType: ImplementationSpecific 25 | backend: 26 | service: 27 | name: servicename-srv 28 | port: 29 | number: 3000 30 | 31 | - path: /api/v1/servicename2/* 32 | pathType: ImplementationSpecific 33 | backend: 34 | service: 35 | name: servicename2-srv 36 | port: 37 | number: 3000 38 | -------------------------------------------------------------------------------- /infra/k8s/secrets/google-application-credentials.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: google-application-credentials #name of the secret to be mounted 5 | type: Opaque 6 | stringData: #file name that will be created to mount 7 | google-application-credentials.json: | 8 | { 9 | "type": "service_account", 10 | "project_id": "your-project-id", 11 | "private_key_id": "your-private-key-id", 12 | "private_key": "your-private-key", 13 | "client_email": "service-account-email", 14 | "client_id": "your-client-id", 15 | "auth_uri": "https://accounts.google.com/o/oauth2/auth", 16 | "token_uri": "https://oauth2.googleapis.com/token", 17 | "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", 18 | "client_x509_cert_url": "client_x509_cert_url" 19 | } 20 | 21 | #The same result can be achieved by using this kubectl command in the folder where google-application-credentials.json is 22 | #kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json 23 | -------------------------------------------------------------------------------- /infra/k8s/secrets/proxy-to-another-gcp-project.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: proxy-to-another-gcp-project-secret #name of the secret to be mounted 5 | type: Opaque 6 | stringData: #file name that will be created to mount 7 | proxy-to-another-gcp-project.json: | 8 | { 9 | "type": "service_account", 10 | "project_id": "your-project-id", 11 | "private_key_id": "your-private-key-id", 12 | "private_key": "your-private-key", 13 | "client_email": "service-account-email", 14 | "client_id": "your-client-id", 15 | "auth_uri": "https://accounts.google.com/o/oauth2/auth", 16 | "token_uri": "https://oauth2.googleapis.com/token", 17 | "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", 18 | 
"client_x509_cert_url": "client_x509_cert_url" 19 | } 20 | 21 | #The same result can be achieved by using this kubectl command in the folder where google-application-credentials.json is 22 | #kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json 23 | -------------------------------------------------------------------------------- /infra/k8s/secrets/shared-secrets.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: shared-secrets 5 | data: 6 | #JWT CONFIGURATION 7 | JWT_KEY: 8 | SECRET: 9 | 10 | #MONGODB CONFIGURATION: 11 | MONGO_URI: 12 | MONGO_URI_TEST: 13 | 14 | #GOOGLE CLOUD CONFIGURATION 15 | GOOGLE_PROJECT_ID: 16 | GOOGLE_CLOUD_PROJECT: 17 | GOOGLE_STORAGE_BUCKET_NAME: 18 | GOOGLE_CLIENT_ID: 19 | GOOGLE_CLIENT_SECRET: 20 | GOOGLE_MAPS_API_KEY: 21 | 22 | #SPARKPOST CONFIGURATION 23 | SPARKPOST_API_KEY: #Use test key here 24 | SPARKPOST_SENDER_DOMAIN: 25 | 26 | # MESSAGEBIRD CONFIGURATION 27 | MESSAGEBIRD_ACCESS_KEY: #Use test key here 28 | MESSAGEBIRD_WHATSAPP_CHANNEL_ID: 29 | MESSAGEBIRD_TEMPLATE_NAMESPACE_ID: 30 | MESSAGEBIRD_TEMPLATE_NAME_TEST: 31 | 32 | #MYSQL CONFIGURATION SECRECTS 33 | MYSQL_HOST_STAGE: 34 | MYSQL_USER_STAGE: 35 | MYSQL_PASSWORD_STAGE: 36 | MYSQL_DB_STAGE: 37 | MYSQL_SOCKET_STAGE: #not necessary 38 | 39 | MYSQL_HOST_PROD: 40 | MYSQL_USER_PROD: 41 | MYSQL_PASSWORD_PROD: 42 | MYSQL_DB_PROD: 43 | MYSQL_SOCKET_PROD: #not necessary 44 | 45 | 46 | #kubectl create secret generic jwt-secret --from-literal=JWT_KEY=JWT_SECRET 47 | 48 | #Don't forget to create the google-application-credentials secret with 49 | #kubectl create secret generic google-application-credentials --from-file=google-application-credentials.json=./google-application-credentials.json 50 | -------------------------------------------------------------------------------- /infra/k8s/skaffold.yaml: -------------------------------------------------------------------------------- 1 | # SKAFFOLD CONFIGURATION FOR PRODUCTION 2 | apiVersion: skaffold/v4beta1 3 | kind: Config 4 | metadata: 5 | name: project-id #project id 6 | build: 7 | artifacts: 8 | # Client context of React 9 | - image: europe-west1-docker.pkg.dev/your_artifact_url/client-servicename 10 | context: client-service 11 | sync: 12 | manual: 13 | - src: ./src/**/*.ts 14 | dest: . 15 | - src: "***/*.html" 16 | dest: . 17 | - src: "***/*.css" 18 | dest: . 19 | docker: 20 | dockerfile: Dockerfile 21 | - image: europe-west1-docker.pkg.dev/your_artifact_url/servicename 22 | context: service-folder 23 | sync: 24 | manual: 25 | - src: ./src/**/*.ts 26 | dest: . 27 | docker: 28 | dockerfile: Dockerfile 29 | tagPolicy: 30 | sha256: {} 31 | googleCloudBuild: 32 | projectId: your-google-cloud-project-id 33 | manifests: 34 | rawYaml: 35 | - ./infra/k8s/* 36 | deploy: 37 | kubectl: {} 38 | -------------------------------------------------------------------------------- /infra/terraform/.gitignore: -------------------------------------------------------------------------------- 1 | # Local .terraform directories 2 | **/.terraform/* 3 | .terraform/ 4 | 5 | 6 | # .tfstate files 7 | *.tfstate 8 | *.tfstate.* 9 | 10 | # Crash log files 11 | crash.log 12 | crash.*.log 13 | 14 | # Exclude all .tfvars files, which are likely to contain sensitive data, such as 15 | # password, private keys, and other secrets. 
These should not be part of version 16 | # control as they are data points which are potentially sensitive and subject 17 | # to change depending on the environment. 18 | *.tfvars 19 | *.tfvars.json 20 | 21 | # Ignore override files as they are usually used to override resources locally and so 22 | # are not checked in 23 | override.tf 24 | override.tf.json 25 | *_override.tf 26 | *_override.tf.json 27 | 28 | # Include override files you do wish to add to version control using negated pattern 29 | # !example_override.tf 30 | 31 | # Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan 32 | # example: *tfplan* 33 | 34 | # Ignore CLI configuration files 35 | .terraformrc 36 | terraform.rc 37 | -------------------------------------------------------------------------------- /infra/terraform/.terraform.lock.hcl: -------------------------------------------------------------------------------- 1 | # This file is maintained automatically by "terraform init". 2 | # Manual edits may be lost in future updates. 3 | 4 | provider "registry.terraform.io/hashicorp/google" { 5 | version = "4.55.0" 6 | constraints = "4.55.0" 7 | hashes = [ 8 | "h1:GMfPJSl9+PS3tHmHmUMo/4CkJ9/tHvZwV2aVp050Fcc=", 9 | "zh:0a82a76dc4bbe05418075f88830f73ad3ca9d56d83a172faaf3306b016219d52", 10 | "zh:367e3c0ce96ab8f9ec3e1fab5a4f9a48b3b5b336622b36b828f75bf6fb663001", 11 | "zh:51fd41c7508c4c39830e5c2885bc053e90d5d24fc90462235b69394185b7fa1d", 12 | "zh:7ebe62261c522631d22ab06951d0d6a1bf629b98aea5d9fe2e2e50ca256cf395", 13 | "zh:9dd119eca735471d61fe9e4cc45e8c257275e2e9f4da30fba7296fc7ae8de99e", 14 | "zh:a4426a0d24dcf8b3899e17530fabb3fb5791ff7db65404c26e66b031a8422bd2", 15 | "zh:c1e93a786b6d014610c3f83fda12b3044009947f729b2042635fa66d9f387c47", 16 | "zh:ea0703ee2f5e3732077e946cfa5cdd85119ef4ecc898a2affdeef9de9f92fe4e", 17 | "zh:ecada51dd406f46e9fce7dafb0b8ef3a671b8d572dbc1d39d9fdc137029f5275", 18 | "zh:effb91791080a86ff130b517bce5253aed1372ad2c6f9cfb252375a196b9f730", 19 | "zh:f1885b811a31e37d53bd780d2485c19754ee2db0a66affeb5e788aa9b1950b8c", 20 | "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /infra/terraform/README.md: -------------------------------------------------------------------------------- 1 | REMEMBER TO ADD A FILE CALLED `terraform.tfvars` with the following options: 2 | 3 | with the following options 4 | 5 | ``` 6 | region = "europe-west1" 7 | zone = "europe-west1-b" 8 | location = "EU" 9 | project = "development-test-skeldon" 10 | environment = "prod" 11 | app_name = "test-rest-api-app" 12 | ``` 13 | 14 | Then run terraform commands as usual. 
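After `terraform apply` has finished (the full command list follows below), the outputs declared in `compute.tf`, `registry.tf`, and `storage.tf` can be read back and wired into the Kubernetes manifests; for example, the global static IP created here is the address behind the name referenced by the ingress annotation `kubernetes.io/ingress.global-static-ip-name`. A minimal sketch using the output names defined in this repo (how the values are fed into your manifests is left to you):

```bash
# Read the provisioned values back out of the Terraform state
terraform output -raw external_static_ip      # address of the global static IP used by the ingress
terraform output -raw artifact_registry_name  # Artifact Registry repository targeted by the skaffold image URLs
terraform output -raw prod_bucket_name        # Cloud Storage bucket used for uploads
```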
15 | 16 | ## Terraform commands could be: 17 | 18 | ```bash 19 | terraform init # only the first time 20 | terraform fmt # to format the code 21 | terraform validate # to validate the code 22 | terraform plan # to see what will be created 23 | terraform apply # to create the infrastructure 24 | ``` 25 | -------------------------------------------------------------------------------- /infra/terraform/compute.tf: -------------------------------------------------------------------------------- 1 | resource "google_container_cluster" "app_cluster" { 2 | name = "${var.app_name}-cluster-${var.environment}" 3 | location = var.region 4 | ip_allocation_policy { 5 | } 6 | enable_autopilot = true 7 | } 8 | 9 | resource "google_compute_global_address" "external_static_ip" { 10 | name = "${var.app_name}-ingress-static-ip" 11 | address_type = "EXTERNAL" 12 | ip_version = "IPV4" 13 | project = var.project 14 | description = "External static IP address for app" 15 | } 16 | 17 | output "external_static_ip" { 18 | value = google_compute_global_address.external_static_ip.address 19 | description = "External static IP address for app" 20 | } 21 | -------------------------------------------------------------------------------- /infra/terraform/main.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_providers { 3 | google = { 4 | source = "hashicorp/google" 5 | version = "4.55.0" 6 | } 7 | } 8 | } 9 | 10 | provider "google" { 11 | project = var.project 12 | region = var.region 13 | zone = var.zone 14 | } 15 | -------------------------------------------------------------------------------- /infra/terraform/registry.tf: -------------------------------------------------------------------------------- 1 | resource "google_artifact_registry_repository" "repo" { 2 | location = "europe-west1" 3 | repository_id = "${var.app_name}-artifact-repository" 4 | description = "Artifact repository created by Terraform" 5 | format = "DOCKER" 6 | } 7 | 8 | output "artifact_registry_name" { 9 | value = google_artifact_registry_repository.repo.name 10 | description = "Artifact registry name" 11 | } 12 | -------------------------------------------------------------------------------- /infra/terraform/storage.tf: -------------------------------------------------------------------------------- 1 | resource "google_storage_bucket" "prod-bucket" { 2 | name = "${var.app_name}-bucket-${var.environment}" 3 | location = var.region 4 | project = var.project 5 | storage_class = "STANDARD" 6 | uniform_bucket_level_access = false 7 | # versioning { 8 | # enabled = true 9 | # } 10 | # lifecycle_rule { 11 | # action { 12 | # type = "Delete" 13 | # storage_class = "NEARLINE" 14 | # } 15 | # condition { 16 | # age = 30 17 | # } 18 | # } 19 | } 20 | 21 | output "prod_bucket_name" { 22 | value = google_storage_bucket.prod-bucket.name 23 | description = "Prod Bucket name" 24 | } 25 | -------------------------------------------------------------------------------- /infra/terraform/variables.tf: -------------------------------------------------------------------------------- 1 | variable "region" {} 2 | variable "zone" {} 3 | variable "location" {} 4 | variable "project" {} 5 | variable "environment" {} 6 | variable "app_name" {} 7 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('ts-jest').JestConfigWithTsJest} */ 2 | module.exports = { 3 
| preset: 'ts-jest', 4 | testEnvironment: 'node', 5 | clearMocks: true, 6 | // roots: ['/src'], 7 | collectCoverage: true, 8 | collectCoverageFrom: ['src/**/([a-zA-Z_]*).{js,ts}', '!**/*.test.{js,ts}'], 9 | }; 10 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "typescript-rest-api-backend", 3 | "version": "1.0.0", 4 | "description": "Express Typescript Rest API backend template with full TS support following gts style guide and gke integration", 5 | "main": "src/index.ts", 6 | "scripts": { 7 | "start:dev": "cross-env NODE_ENV=development nodemon ./src/bin/server", 8 | "start:prod": "npm run prod", 9 | "prod": "cross-env NODE_ENV=production npm-run-all compile server", 10 | "server": "node ./build/src/bin/server", 11 | "lint": "gts lint", 12 | "clean": "gts clean", 13 | "compile": "npm run clean && tsc", 14 | "watch": "tsc -w", 15 | "fix": "gts fix", 16 | "prepare": "npm run compile", 17 | "pretest": "npm run compile", 18 | "posttest": "npm run lint", 19 | "test": "cross-env NODE_ENV=test jest --verbose", 20 | "docs": "rm -rf docs/ && typedoc", 21 | "generate:env": "sh ./scripts/generate-env.sh" 22 | }, 23 | "author": "Giuseppe Albrizio", 24 | "license": "MIT", 25 | "devDependencies": { 26 | "@types/bcryptjs": "^2.4.2", 27 | "@types/compression": "^1.7.2", 28 | "@types/cookie-parser": "^1.4.3", 29 | "@types/cors": "^2.8.13", 30 | "@types/debug": "^4.1.8", 31 | "@types/express": "^4.17.17", 32 | "@types/express-session": "^1.17.7", 33 | "@types/jest": "^29.5.3", 34 | "@types/jsonwebtoken": "^9.0.2", 35 | "@types/lodash": "^4.14.195", 36 | "@types/multer": "^1.4.7", 37 | "@types/node": "^20.4.1", 38 | "@types/passport": "^1.0.12", 39 | "@types/passport-google-oauth20": "^2.0.11", 40 | "@types/passport-local": "^1.0.35", 41 | "@types/pdfmake": "^0.2.2", 42 | "@types/sparkpost": "^2.1.5", 43 | "@types/supertest": "^2.0.12", 44 | "@types/swagger-ui-express": "^4.1.3", 45 | "@types/validator": "^13.7.17", 46 | "cross-env": "^7.0.3", 47 | "gts": "^3.1.1", 48 | "jest": "^29.6.1", 49 | "npm-run-all": "^4.1.5", 50 | "rimraf": "^5.0.1", 51 | "supertest": "^6.3.3", 52 | "ts-jest": "^29.1.1", 53 | "ts-node": "^10.9.1", 54 | "typedoc": "^0.24.8", 55 | "typescript": "~5.1.6" 56 | }, 57 | "dependencies": { 58 | "@google-cloud/pubsub": "^3.7.1", 59 | "@google-cloud/storage": "^6.11.0", 60 | "@googlemaps/google-maps-services-js": "^3.3.33", 61 | "@hokify/agenda": "^6.3.0", 62 | "@types/morgan": "^1.9.4", 63 | "axios": "^1.4.0", 64 | "bcryptjs": "^2.4.3", 65 | "clean-deep": "^3.4.0", 66 | "compression": "^1.7.4", 67 | "connect-mongo": "^5.0.0", 68 | "cookie-parser": "^1.4.6", 69 | "cors": "^2.8.5", 70 | "crypto": "^1.0.1", 71 | "crypto-random-string": "^5.0.0", 72 | "debug": "^4.3.4", 73 | "dotenv": "^16.3.1", 74 | "express": "^4.18.2", 75 | "express-mongo-sanitize": "^2.2.0", 76 | "express-rate-limit": "^6.7.1", 77 | "express-session": "^1.17.3", 78 | "firebase-admin": "^11.9.0", 79 | "helmet": "^7.0.0", 80 | "http": "^0.0.1-security", 81 | "jsonwebtoken": "^9.0.1", 82 | "lodash": "^4.17.21", 83 | "messagebird": "^4.0.1", 84 | "mongodb": "^5.7.0", 85 | "mongoose": "^7.3.3", 86 | "morgan": "^1.10.0", 87 | "multer": "^1.4.5-lts.1", 88 | "multer-cloud-storage": "^3.0.0", 89 | "mysql2": "^3.5.1", 90 | "nodemon": "^3.0.1", 91 | "passport": "^0.6.0", 92 | "passport-google-oauth20": "^2.0.0", 93 | "passport-local": "^1.0.0", 94 | "pdfmake": "^0.2.7", 95 | "slugify": 
"^1.6.6", 96 | "sparkpost": "^2.1.4", 97 | "swagger-ui-express": "^5.0.0", 98 | "validator": "^13.9.0", 99 | "winston": "^3.10.0", 100 | "xmlbuilder2": "^3.1.1", 101 | "xss-clean": "^0.1.1" 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Express 4 | 9 | 10 | 11 | 12 |

Express 13 | Welcome to Express Typescript Rest API 14 | Go to http://localhost:3000/api/v1 to test your API endpoint
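As the landing page suggests, the quickest smoke test is to call the versioned API prefix directly. A minimal sketch with `curl`, assuming the server is running locally on port 3000; the expected body matches the health-check handler in `src/api/v1/index.route.ts` shown later in this document:

```bash
# Hit the health-check route mounted at /api/v1
curl -s http://localhost:3000/api/v1
# => {"status":"success","message":"Healthy check completed successfully"}
```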
15 | 16 | 17 | -------------------------------------------------------------------------------- /public/stylesheets/style.css: -------------------------------------------------------------------------------- 1 | body { 2 | padding: 50px; 3 | font: 14px "Lucida Grande", Helvetica, Arial, sans-serif; 4 | } 5 | 6 | a { 7 | color: #00B7FF; 8 | } 9 | -------------------------------------------------------------------------------- /scripts/generate-env.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Define color codes 4 | RED='\033[0;31m' 5 | GREEN='\033[0;32m' 6 | CYAN='\033[0;36m' 7 | YELLOW='\033[0;33m' 8 | NC='\033[0m' # No Color 9 | 10 | # Check if .env file exists and delete 11 | echo "${YELLOW}Deleting old .env file...${NC}" 12 | rm -f ./.env.test.local 13 | 14 | # Greet user 15 | echo "${CYAN}Hello! Let's set up your environment variables.${NC}" 16 | 17 | # Ask user for variable content 18 | # Ask user for variable content and validate input 19 | while true; do 20 | read -p "What is the HOST? [0.0.0.0] " HOST 21 | # HOST=${HOST:-0.0.0.0} # set default value for PORT 22 | if [ -z "$HOST" ]; then 23 | echo "${RED}HOST cannot be blank. Please enter a value.${NC}" 24 | else 25 | break 26 | fi 27 | done 28 | 29 | # Ask user for variable content and validate input 30 | while true; do 31 | read -p "What is the port you want to run the server on? [3000] " PORT 32 | # PORT=${PORT:-3000} # set default value for PORT 33 | if [ -z "$PORT" ]; then 34 | echo "${RED}PORT cannot be blank. Please enter a value.${NC}" 35 | else 36 | break 37 | fi 38 | done 39 | 40 | while true; do 41 | read -p "What is the name of the service? " SERVICE_NAME 42 | if [ -z "$SERVICE_NAME" ]; then 43 | echo "${RED}SERVICE_NAME cannot be blank. Please enter a value.${NC}" 44 | else 45 | break 46 | fi 47 | done 48 | 49 | while true; do 50 | read -p "What is your JWT_KEY? " JWT_KEY 51 | if [ -z "$JWT_KEY" ]; then 52 | echo "${RED}JWT_KEY cannot be blank. Please enter a value.${NC}" 53 | else 54 | break 55 | fi 56 | done 57 | 58 | while true; do 59 | read -p "What is your SECRET? " SECRET 60 | if [ -z "$SECRET" ]; then 61 | echo "${RED}SECRET cannot be blank. Please enter a value.${NC}" 62 | else 63 | break 64 | fi 65 | done 66 | 67 | HASH=10 # set default value for HASH 68 | 69 | read -p "What is the path to your Google Application Credentials file? [./src/config/gcloud/google-application-credentials.json] " GOOGLE_APPLICATION_CREDENTIALS 70 | GOOGLE_APPLICATION_CREDENTIALS=${GOOGLE_APPLICATION_CREDENTIALS:-./src/config/gcloud/google-application-credentials.json} # set default value for GOOGLE_APPLICATION_CREDENTIALS 71 | 72 | read -p "What is your Google Cloud project ID? " GOOGLE_PROJECT_ID 73 | read -p "What is your Google Cloud Storage bucket name? " GOOGLE_STORAGE_BUCKET_NAME 74 | read -p "What is your Google Client ID? " GOOGLE_CLIENT_ID 75 | read -p "What is your Google Client Secret? " GOOGLE_CLIENT_SECRET 76 | read -p "What is your Google Maps API key? " GOOGLE_MAPS_API_KEY 77 | 78 | read -p "What is your CLIENT_URL? [http://localhost:3000] " CLIENT_URL 79 | CLIENT_URL=${CLIENT_URL:-http://localhost:3000} # set default value for CLIENT_URL 80 | 81 | read -p "What is your MongoDB URI? [mongodb://localhost:27017/database_name] " MONGO_URI 82 | MONGO_URI=${MONGO_URI:-mongodb://localhost:27017/database_name} # set default value for MONGO_URI 83 | 84 | read -p "What is your MongoDB test URI? 
[mongodb://localhost:27017/test_database_name] " MONGO_URI_TEST 85 | MONGO_URI_TEST=${MONGO_URI_TEST:-mongodb://localhost:27017/test_database_name} # set default value for MONGO_URI_TEST 86 | 87 | read -p "What is your MongoDB username? " MONGO_USER 88 | MONGO_USER=${MONGO_USER:-'your_mongo_user'} # set default value for MONGO_USER and add single quotes around the value 89 | 90 | read -p "What is your MongoDB password? " MONGO_PASS 91 | MONGO_PASS=${MONGO_PASS:-'your_mongo_password'} # set default value for MONGO_PASS and add single quotes around the value 92 | 93 | read -p "What is your MySQL staging host? " MYSQL_HOST_STAGE 94 | MYSQL_HOST_STAGE=${MYSQL_HOST_STAGE:-'your_myql_host_stage'} # set default value for MYSQL_HOST_STAGE and add single quotes around the value 95 | 96 | read -p "What is your MySQL staging user? " MYSQL_USER_STAGE 97 | MYSQL_USER_STAGE=${MYSQL_USER_STAGE:-'your_myql_user'} # set default value for MYSQL_USER_STAGE and add single quotes around the value 98 | 99 | read -p "What is your MySQL staging password? " MYSQL_PASSWORD_STAGE 100 | MYSQL_PASSWORD_STAGE=${MYSQL_PASSWORD_STAGE:-'your_myql_pass'} # set default value for MYSQL_PASSWORD_STAGE and add single quotes around the value 101 | 102 | read -p "What is your MySQL staging database? " MYSQL_DB_STAGE 103 | MYSQL_DB_STAGE=${MYSQL_DB_STAGE:-'your_myql_db_name'} # set default value for MYSQL_DB_STAGE and add single quotes around the value 104 | 105 | read -p "What is your MySQL staging socket? " MYSQL_SOCKET_STAGE 106 | MYSQL_SOCKET_STAGE=${MYSQL_SOCKET_STAGE:-'/your/socket-cloud-sql'} # set default value for MYSQL_SOCKET_STAGE and add single quotes around the value 107 | 108 | read -p "What is your MySQL production host? " MYSQL_HOST_PROD 109 | MYSQL_HOST_PROD=${MYSQL_HOST_PROD:-'your_myql_host_stage'} # set default value for MYSQL_HOST_PROD and 110 | 111 | read -p "What is your MySQL production user? " MYSQL_USER_PROD 112 | MYSQL_USER_PROD=${MYSQL_USER_PROD:-'your_myql_user'} # set default value for MYSQL_USER_PROD and add single quotes around the value 113 | 114 | read -p "What is your MySQL production password? " MYSQL_PASSWORD_PROD 115 | MYSQL_PASSWORD_PROD=${MYSQL_PASSWORD_PROD:-'your_myql_pass'} # set default value for MYSQL_PASSWORD_PROD and add single quotes around the value 116 | 117 | read -p "What is your MySQL production database? " MYSQL_DB_PROD 118 | MYSQL_DB_PROD=${MYSQL_DB_PROD:-'your_myql_db_name'} # set default value for MYSQL_DB_PROD and add single quotes around the value 119 | 120 | read -p "What is your MySQL production socket? " MYSQL_SOCKET_PROD 121 | MYSQL_SOCKET_PROD=${MYSQL_SOCKET_PROD:-'/your/socket-cloud-sql'} # set default value for MYSQL_SOCKET_PROD and add single quotes around the value 122 | 123 | read -p "What is your SparkPost API key? " SPARKPOST_API_KEY 124 | SPARKPOST_API_KEY=${SPARKPOST_API_KEY:-'your_sparkpost_api_key'} # set default value for SPARKPOST_API_KEY and add single quotes around the value 125 | 126 | read -p "What is your SparkPost sender domain? " SPARKPOST_SENDER_DOMAIN 127 | SPARKPOST_SENDER_DOMAIN=${SPARKPOST_SENDER_DOMAIN:-'your_sparkpost_sender_domain'} # set default value for SPARKPOST_SENDER_DOMAIN and add single quotes around the value 128 | 129 | read -p "What is your MessageBird Access Key? " MESSAGEBIRD_ACCESS_KEY 130 | MESSAGEBIRD_ACCESS_KEY=${MESSAGEBIRD_ACCESS_KEY:-'your_messagbird_access_key'} # set default value for MESSAGEBIRD_ACCESS_KEY and add single quotes around the value 131 | 132 | read -p "What is your MessageBird WhatsApp Channel ID? 
" MESSAGEBIRD_WHATSAPP_CHANNEL_ID 133 | MESSAGEBIRD_WHATSAPP_CHANNEL_ID=${MESSAGEBIRD_WHATSAPP_CHANNEL_ID:-'your_messagebird_whatsapp_channel_id'} # set default value for MESSAGEBIRD_WHATSAPP_CHANNEL_ID and add single quotes around the value 134 | 135 | read -p "What is your MessageBird Template Namespace ID? " MESSAGEBIRD_TEMPLATE_NAMESPACE_ID 136 | MESSAGEBIRD_TEMPLATE_NAMESPACE_ID=${MESSAGEBIRD_TEMPLATE_NAMESPACE_ID:-'your_messagebird_template_namespace_id'} # set default value for MESSAGEBIRD_TEMPLATE_NAMESPACE_ID and add single quotes around the value 137 | 138 | # Write variables to .env file one level up from the script's location 139 | echo "# SERVER CONFIGURATION" >> ./.env.test.local 140 | echo "HOST=${HOST}" >> ./.env.test.local 141 | echo "PORT=${PORT}" >> ./.env.test.local 142 | echo "SERVICE_NAME='${SERVICE_NAME}'" >> ./.env.test.local 143 | 144 | echo "# JWT CONFIGURATION" >> ./.env.test.local 145 | echo "JWT_KEY='${JWT_KEY}'" >> ./.env.test.local 146 | echo "SECRET='${SECRET}'" >> ./.env.test.local 147 | echo "HASH=${HASH}" >> ./.env.test.local 148 | 149 | echo "# MONGO DB CONFIGURATION" >> ./.env.test.local 150 | echo "MONGO_URI='${MONGO_URI}'" >> ./.env.test.local 151 | echo "MONGO_URI_TEST='${MONGO_URI_TEST}'" >> ./.env.test.local 152 | echo "MONGO_USER='${MONGO_USER}'" >> ./.env.test.local 153 | echo "MONGO_PASS='${MONGO_PASS}'" >> ./.env.test.local 154 | 155 | echo "# GOOGLE CLOUD CONFIGURATION" >> ./.env.test.local 156 | echo "GOOGLE_APPLICATION_CREDENTIALS='${GOOGLE_APPLICATION_CREDENTIALS}'" >> ./.env.test.local 157 | echo "GOOGLE_PROJECT_ID='${GOOGLE_PROJECT_ID}'" >> ./.env.test.local 158 | echo "GOOGLE_STORAGE_BUCKET_NAME='${GOOGLE_STORAGE_BUCKET_NAME}'" >> ./.env.test.local 159 | echo "GOOGLE_CLIENT_ID='${GOOGLE_CLIENT_ID}'" >> ./.env.test.local 160 | echo "GOOGLE_CLIENT_SECRET='${GOOGLE_CLIENT_SECRET}'" >> ./.env.test.local 161 | echo "GOOGLE_MAPS_API_KEY='${GOOGLE_MAPS_API_KEY}'" >> ./.env.test.local 162 | 163 | echo "# CLIENT CONFIGURATION" >> ./.env.test.local 164 | echo "CLIENT_URL='${CLIENT_URL}'" >> ./.env.test.local 165 | 166 | echo "# MYSQL CONFIGURATION DEVELOPMENT" >> ./.env.test.local 167 | echo "MYSQL_HOST_STAGE='${MYSQL_HOST_STAGE}'" >> ./.env.test.local 168 | echo "MYSQL_USER_STAGE='${MYSQL_USER_STAGE}'" >> ./.env.test.local 169 | echo "MYSQL_PASSWORD_STAGE='${MYSQL_PASSWORD_STAGE}'" >> ./.env.test.local 170 | echo "MYSQL_DB_STAGE='${MYSQL_DB_STAGE}'" >> ./.env.test.local 171 | echo "MYSQL_SOCKET_STAGE='${MYSQL_SOCKET_STAGE}'" >> ./.env.test.local 172 | 173 | echo "# MYSQL CONFIGURATION PRODUCTION" >> ./.env.test.local 174 | echo "MYSQL_HOST_PROD='${MYSQL_HOST_PROD}'" >> ./.env.test.local 175 | echo "MYSQL_USER_PROD='${MYSQL_USER_PROD}'" >> ./.env.test.local 176 | echo "MYSQL_PASSWORD_PROD='${MYSQL_PASSWORD_PROD}'" >> ./.env.test.local 177 | echo "MYSQL_DB_PROD='${MYSQL_DB_PROD}'" >> ./.env.test.local 178 | echo "MYSQL_SOCKET_PROD='${MYSQL_SOCKET_PROD}'" >> ./.env.test.local 179 | 180 | echo "# SPARKPOST CONFIGURATION" >> ./.env.test.local 181 | echo "SPARKPOST_API_KEY='${SPARKPOST_API_KEY}'" >> ./.env.test.local 182 | echo "SPARKPOST_SENDER_DOMAIN='${SPARKPOST_SENDER_DOMAIN}'" >> ./.env.test.local 183 | 184 | echo "# MESSAGEBIRD CONFIGURATION" >> ./.env.test.local 185 | echo "MESSAGEBIRD_ACCESS_KEY='${MESSAGEBIRD_ACCESS_KEY}'" >> ./.env.test.local 186 | echo "MESSAGEBIRD_WHATSAPP_CHANNEL_ID='${MESSAGEBIRD_WHATSAPP_CHANNEL_ID}'" >> ./.env.test.local 187 | echo "MESSAGEBIRD_TEMPLATE_NAMESPACE_ID='${MESSAGEBIRD_TEMPLATE_NAMESPACE_ID}'" >> 
./.env.test.local 188 | 189 | # Success message 190 | echo "${GREEN}Your environment variables have been written to ./.env.test.local. Thank you for using this script!${NC}" 191 | echo "${GREEN}Please make sure to copy the .env.test.local file to .env before going to production.${NC}" 192 | -------------------------------------------------------------------------------- /scripts/skaffold-dev.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Check kubectl context 4 | CURRENT_CONTEXT=$(kubectl config current-context) 5 | if [ "$CURRENT_CONTEXT" != "docker-desktop" ]; then 6 | echo "Please set kubectl context to docker-desktop before running this script." 7 | exit 1 8 | fi 9 | 10 | # Set the options 11 | NO_PRUNE=false 12 | CACHE_ARTIFACTS=false 13 | 14 | # Parse the options 15 | while getopts ":npca" opt; do 16 | case $opt in 17 | n) NO_PRUNE=true ;; 18 | p) NO_PRUNE=false ;; 19 | c) CACHE_ARTIFACTS=true ;; 20 | a) CACHE_ARTIFACTS=false ;; 21 | \?) echo "Invalid option: -$OPTARG" >&2 ;; 22 | esac 23 | done 24 | 25 | # Run the skaffold dev command with the options 26 | skaffold dev --no-prune=$NO_PRUNE --cache-artifacts=$CACHE_ARTIFACTS 27 | -------------------------------------------------------------------------------- /src/api/config/roles.config.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Roles are used to define the access rights of a user. 3 | * We use a custom middleware to check if the user has the right to access a route. 4 | * The middleware is located in the middlewares folder (verifyApiRights.middleware.ts) 5 | */ 6 | export interface IApiRoles { 7 | superAdmin: string[]; 8 | admin: string[]; 9 | employee: string[]; 10 | client: string[]; 11 | vendor: string[]; 12 | user: string[]; 13 | } 14 | 15 | const roles: IApiRoles = { 16 | superAdmin: ['*', 'getUsers', 'createUsers', 'manageUsers', 'deleteUsers'], 17 | admin: ['getUsers', 'createUsers', 'manageUsers', 'deleteUsers'], 18 | employee: ['getUsers'], 19 | client: ['getUsers'], 20 | vendor: ['getUsers'], 21 | user: ['getUsers'], 22 | }; 23 | 24 | export const apiRoles = Object.keys(roles); 25 | 26 | export const apiRolesRights = new Map(Object.entries(roles)); 27 | -------------------------------------------------------------------------------- /src/api/v1/app/README.md: -------------------------------------------------------------------------------- 1 | # API App Readme 2 | 3 | This is a sample controller and route for an Express app created for testing purposes. 4 | 5 | ## Getting Started 6 | 7 | ### Prerequisites 8 | 9 | - Node.js installed on your local machine 10 | - An understanding of the basics of Node.js and Express 11 | 12 | ### Installing 13 | 14 | 1. Clone the repository 15 | 2. Install dependencies by running `npm install` 16 | 3. Start the server by running `npm run start:dev` 17 | 4.
Access the app at `http://localhost:3000/api/v1` with the following routes 18 | 19 | ## Usage 20 | 21 | The app has the following endpoints: 22 | 23 | - `/test-route-protection`: A protected route to check if the user is authenticated 24 | - `/test-check-authenticated-user`: A route to check the authenticated user 25 | - `/test-pubsub-publish`: A route to publish a message to a Google PubSub topic 26 | - `/test-pubsub-pull-subscription`: A route to receive a message from a Google PubSub pull subscription 27 | - `/test-pubsub-push-subscription`: A route to receive a message from a Google PubSub push subscription 28 | - `/test-pdf-make`: A route to generate a PDF 29 | 30 | To use the endpoints, send a request to the respective endpoint using a tool like Postman. 31 | 32 | ## Controller Functions 33 | 34 | The app has the following controller functions: 35 | 36 | ### `checkRouteProtection` 37 | 38 | A function to check if the user is authenticated and the test is completed. 39 | 40 | ### `checkUserLogged` 41 | 42 | A function to check the authenticated user. 43 | 44 | ### `checkPubSubPublish` 45 | 46 | A function to publish a message to a Google PubSub topic. 47 | 48 | ### `checkPubSubPullSubscription` 49 | 50 | A function to receive a message from a Google PubSub pull subscription. 51 | 52 | ### `checkPubsubPushSubscription` 53 | 54 | A function to receive a message from a Google PubSub push subscription. 55 | 56 | ### `checkPDFMake` 57 | 58 | A function to generate a PDF. 59 | 60 | ## Acknowledgments 61 | 62 | This app was created for testing purposes only. 63 | -------------------------------------------------------------------------------- /src/api/v1/app/app.controller.ts: -------------------------------------------------------------------------------- 1 | import {Response} from 'express'; 2 | import {CustomError} from '../../../errors/CustomError.error'; 3 | import {ICustomExpressRequest} from '../../../middlewares/currentUser.middleware'; 4 | 5 | import Logger from '../../../lib/logger'; 6 | import { 7 | publishMessageToPubSubTopic, 8 | listenForPubSubPullSubscription, 9 | PubSubPublishError, 10 | } from '../../../services/google-pub-sub/pubsub.service'; 11 | import {generatePDF, IPDFObject} from '../../../services/pdf/pdf.service'; 12 | import {generateXML, IXMLObject} from '../../../services/xml/xml.service'; 13 | import { 14 | IFirebaseMessage, 15 | sendMulticastFirebaseMessage, 16 | sendSingleFirebaseMessage, 17 | } from '../../../services/messaging/firebase.service'; 18 | 19 | /** 20 | * Test controller - Protected router test 21 | * @param req - Custom request object 22 | * @param res - Response object 23 | */ 24 | const checkRouteProtection = ( 25 | req: ICustomExpressRequest, 26 | res: Response 27 | ): void => { 28 | res.status(200).json({ 29 | status: 'success', 30 | data: { 31 | message: 'Yes you are authenticated and the test is completed', 32 | }, 33 | }); 34 | }; 35 | 36 | /** 37 | * Test controller - Check authenticated user 38 | * @param req 39 | * @param res 40 | */ 41 | const checkUserLogged = async (req: ICustomExpressRequest, res: Response) => { 42 | try { 43 | res.status(200).json({ 44 | status: 'success', 45 | message: 'User logged retrieved', 46 | userInPassport: req?.user, 47 | userInSession: req?.session, 48 | userInCustomMiddleware: req.currentUser, 49 | }); 50 | } catch (error) { 51 | Logger.debug(error); 52 | if (error instanceof CustomError) { 53 | throw new CustomError(error.statusCode, error.message); 54 | } 55 | } 56 | }; 57 | 58 | /** 59 | * Test 
controller - Check PubSub publish message to a topic 60 | * @param req 61 | * @param res 62 | */ 63 | const checkPubSubPublish = async ( 64 | req: ICustomExpressRequest, 65 | res: Response 66 | ) => { 67 | try { 68 | const message = await publishMessageToPubSubTopic( 69 | {test: 'test', message: 'this is a message'}, 70 | 'test' 71 | ); 72 | 73 | res.status(200).json({ 74 | status: 'success', 75 | message: 'Message published to PubSub', 76 | response: {messageId: message}, 77 | }); 78 | } catch (error) { 79 | if (error instanceof PubSubPublishError) { 80 | res.status(error.statusCode).json({ 81 | status: 'error', 82 | message: error.message, 83 | }); 84 | } else { 85 | res.status(500).json({ 86 | status: 'error', 87 | message: 'Failed to publish message to PubSub. Reason not known', 88 | }); 89 | } 90 | } 91 | }; 92 | 93 | /** 94 | * Test controller - Check PubSub message from a pull subscription 95 | * @param req 96 | * @param res 97 | */ 98 | const checkPubSubPullSubscription = async ( 99 | req: ICustomExpressRequest, 100 | res: Response 101 | ) => { 102 | try { 103 | const response = await listenForPubSubPullSubscription( 104 | 'test-pull-subscription', 105 | 10 106 | ); 107 | 108 | res.status(200).json({ 109 | status: 'success', 110 | message: 'Message received from PubSub Pull Subscription', 111 | response, 112 | }); 113 | } catch (error) { 114 | if (error instanceof PubSubPublishError) { 115 | res.status(error.statusCode).json({ 116 | status: 'error', 117 | message: error.message, 118 | }); 119 | } else { 120 | res.status(500).json({ 121 | status: 'error', 122 | message: 'Failed to listen for pull message. Reason not known', 123 | }); 124 | } 125 | } 126 | }; 127 | 128 | /** 129 | * Test controller - Check PubSub message from a push subscription 130 | * @param req 131 | * @param res 132 | */ 133 | const checkPubsubPushSubscription = async ( 134 | req: ICustomExpressRequest, 135 | res: Response 136 | ) => { 137 | try { 138 | const data = Buffer.from(req.body.message.data, 'base64').toString('utf-8'); 139 | 140 | const response = await JSON.parse(data); 141 | Logger.debug(response); 142 | 143 | res.status(200).send('Message received from PubSub Push Subscription'); 144 | } catch (error) { 145 | Logger.error(error); 146 | if (error instanceof CustomError) { 147 | res.status(error.statusCode).json({ 148 | status: 'error', 149 | message: error.message, 150 | }); 151 | } else { 152 | res.status(500).json({ 153 | status: 'error', 154 | message: 'Failed to listen for push message. Reason not known', 155 | }); 156 | } 157 | } 158 | }; 159 | 160 | /** 161 | * Test controller - Check PDF generation 162 | * @param req 163 | * @param res 164 | * @returns 165 | */ 166 | const checkPDFMake = async (req: ICustomExpressRequest, res: Response) => { 167 | try { 168 | const body: IPDFObject = { 169 | key: 'value', 170 | }; 171 | 172 | const directory = 'pdfs'; 173 | 174 | const response = await generatePDF(body, directory); 175 | 176 | return res.status(200).json({ 177 | status: 'success', 178 | message: 'PDF generated', 179 | response, 180 | }); 181 | } catch (error) { 182 | Logger.error(error); 183 | if (error instanceof CustomError) { 184 | res.status(error.statusCode).json({ 185 | status: 'error', 186 | message: error.message, 187 | }); 188 | } else { 189 | res.status(500).json({ 190 | status: 'error', 191 | message: `Failed to generate PDF. 
Reason error: ${error}`, 192 | }); 193 | } 194 | } 195 | }; 196 | 197 | /** 198 | * Test controller - Check XML generation 199 | * @param req 200 | * @param res 201 | * @returns 202 | */ 203 | const checkXMLBuilder = async (req: ICustomExpressRequest, res: Response) => { 204 | try { 205 | const body: IXMLObject = { 206 | key: 'value', 207 | }; 208 | 209 | const response = await generateXML(body); 210 | 211 | return res.status(200).json({ 212 | status: 'success', 213 | message: 'XML generated', 214 | response, 215 | }); 216 | } catch (error) { 217 | Logger.error(error); 218 | if (error instanceof CustomError) { 219 | res.status(error.statusCode).json({ 220 | status: 'error', 221 | message: error.message, 222 | }); 223 | } else { 224 | res.status(500).json({ 225 | status: 'error', 226 | message: `Failed to generate PDF. Reason error: ${error}`, 227 | }); 228 | } 229 | } 230 | }; 231 | 232 | /** 233 | * Test controller - Check Firebase single notification 234 | * @param req 235 | * @param res 236 | */ 237 | const checkFirebaseSingleNotification = async ( 238 | req: ICustomExpressRequest, 239 | res: Response 240 | ) => { 241 | try { 242 | const {message, userId} = req.body; 243 | 244 | // validate that the message object has the correct interface 245 | const validatedMessage: IFirebaseMessage = message; 246 | 247 | const response = await sendSingleFirebaseMessage(validatedMessage, userId); 248 | 249 | res.status(200).json({ 250 | status: 'success', 251 | message: 'Message sent to Firebase', 252 | response, 253 | }); 254 | } catch (error) { 255 | if (error instanceof CustomError) { 256 | res.status(error.statusCode).json({ 257 | status: 'error', 258 | message: error.message, 259 | }); 260 | } else { 261 | res.status(500).json({ 262 | status: 'error', 263 | message: 'Failed to send message to Firebase', 264 | error, 265 | }); 266 | } 267 | } 268 | }; 269 | 270 | /** 271 | * Test controller - Check Firebase multicast notification 272 | * @param req 273 | * @param res 274 | */ 275 | const checkFirebaseMulticastNotification = async ( 276 | req: ICustomExpressRequest, 277 | res: Response 278 | ) => { 279 | try { 280 | const {message, usersId} = req.body; 281 | 282 | // validate that the message object has the correct interface 283 | const validatedMessage: IFirebaseMessage = message; 284 | 285 | if (!Array.isArray(usersId)) { 286 | throw new CustomError(400, 'usersId must be an array'); 287 | } 288 | 289 | const response = await sendMulticastFirebaseMessage( 290 | validatedMessage, 291 | usersId 292 | ); 293 | 294 | res.status(200).json({ 295 | status: response.status, 296 | message: response.message, 297 | response: response.response, 298 | }); 299 | } catch (error) { 300 | if (error instanceof CustomError) { 301 | res.status(error.statusCode).json({ 302 | status: 'error', 303 | message: error.message, 304 | }); 305 | } else { 306 | res.status(500).json({ 307 | status: 'error', 308 | message: 'Failed to send message to Firebase', 309 | error, 310 | }); 311 | } 312 | } 313 | }; 314 | 315 | export { 316 | checkRouteProtection, 317 | checkUserLogged, 318 | checkPubSubPublish, 319 | checkPubSubPullSubscription, 320 | checkPubsubPushSubscription, 321 | checkPDFMake, 322 | checkXMLBuilder, 323 | checkFirebaseSingleNotification, 324 | checkFirebaseMulticastNotification, 325 | }; 326 | -------------------------------------------------------------------------------- /src/api/v1/app/app.route.ts: -------------------------------------------------------------------------------- 1 | import express from 
'express'; 2 | import catchAsyncHandler from '../../../middlewares/catchAsyncHandler.middleware'; 3 | 4 | import {requireAuthenticationMiddleware} from '../../../middlewares/requireAuthentication.middleware'; 5 | import { 6 | checkPubSubPullSubscription, 7 | checkPubsubPushSubscription, 8 | checkPubSubPublish, 9 | checkRouteProtection, 10 | checkUserLogged, 11 | checkPDFMake, 12 | checkXMLBuilder, 13 | checkFirebaseSingleNotification, 14 | checkFirebaseMulticastNotification, 15 | } from './app.controller'; 16 | 17 | const appRouter = express.Router(); 18 | 19 | appRouter.get( 20 | '/test-route-protection', 21 | requireAuthenticationMiddleware, 22 | catchAsyncHandler(checkRouteProtection) 23 | ); 24 | 25 | appRouter.get( 26 | '/test-check-authenticated-user', 27 | requireAuthenticationMiddleware, 28 | catchAsyncHandler(checkUserLogged) 29 | ); 30 | 31 | appRouter.post( 32 | '/test-pubsub-publish', 33 | requireAuthenticationMiddleware, 34 | catchAsyncHandler(checkPubSubPublish) 35 | ); 36 | 37 | appRouter.post( 38 | '/test-pubsub-pull-subscription', 39 | requireAuthenticationMiddleware, 40 | catchAsyncHandler(checkPubSubPullSubscription) 41 | ); 42 | 43 | appRouter.post( 44 | '/test-pubsub-push-subscription', 45 | requireAuthenticationMiddleware, 46 | catchAsyncHandler(checkPubsubPushSubscription) 47 | ); 48 | 49 | appRouter.post( 50 | '/test-pdf-make', 51 | requireAuthenticationMiddleware, 52 | catchAsyncHandler(checkPDFMake) 53 | ); 54 | 55 | appRouter.post( 56 | '/test-xml-builder', 57 | requireAuthenticationMiddleware, 58 | catchAsyncHandler(checkXMLBuilder) 59 | ); 60 | 61 | appRouter.post( 62 | '/test-firebase-single-message', 63 | requireAuthenticationMiddleware, 64 | catchAsyncHandler(checkFirebaseSingleNotification) 65 | ); 66 | 67 | appRouter.post( 68 | '/test-firebase-multicast-message', 69 | requireAuthenticationMiddleware, 70 | catchAsyncHandler(checkFirebaseMulticastNotification) 71 | ); 72 | 73 | export default appRouter; 74 | -------------------------------------------------------------------------------- /src/api/v1/auth/auth.controller.ts: -------------------------------------------------------------------------------- 1 | import {NextFunction, Response} from 'express'; 2 | import {IVerifyOptions} from 'passport-local'; 3 | 4 | import {ICustomExpressRequest} from '../../../middlewares/currentUser.middleware'; 5 | import createCookieFromToken from '../../../utils/createCookieFromToken.utils'; 6 | import {CustomError} from '../../../errors'; 7 | import Logger from '../../../lib/logger'; 8 | import passport from '../../../config/passport.config'; 9 | import User, {IUserMethods} from '../user/user.model'; 10 | import {sendResetPasswordToken} from '../../../services/email/sparkpost.service'; 11 | 12 | /** 13 | * Signup Local strategy 14 | * @param req 15 | * @param res 16 | * @param next 17 | * @returns 18 | */ 19 | const signup = ( 20 | req: ICustomExpressRequest, 21 | res: Response, 22 | next: NextFunction 23 | ): Promise => { 24 | return passport.authenticate( 25 | 'signup', 26 | {session: false}, 27 | async (err: Error, user: IUserMethods, info: IVerifyOptions) => { 28 | try { 29 | if (err || !user) { 30 | const {message} = info; 31 | return res.status(400).json({ 32 | status: 'error', 33 | error: { 34 | message, 35 | }, 36 | }); 37 | } 38 | createCookieFromToken(user, 201, req, res); 39 | } catch (error) { 40 | Logger.error(error); 41 | if (error instanceof CustomError) { 42 | throw new CustomError(error.statusCode, error.message); 43 | } 44 | } 45 | } 46 | )(req, res, 
next); 47 | }; 48 | 49 | /** 50 | * Login Local strategy 51 | * @param req 52 | * @param res 53 | * @param next 54 | */ 55 | const login = ( 56 | req: ICustomExpressRequest, 57 | res: Response, 58 | next: NextFunction 59 | ) => { 60 | passport.authenticate( 61 | 'login', 62 | {session: false}, 63 | async (err: Error, user: IUserMethods, info: IVerifyOptions) => { 64 | try { 65 | if (err || !user) { 66 | return res.status(401).json({ 67 | status: 'error', 68 | error: { 69 | message: info.message, 70 | }, 71 | }); 72 | } 73 | // call req.login manually to set the session and 74 | // init passport correctly in serialize & deserialize 75 | req.logIn(user, error => { 76 | if (error) { 77 | return next(error); 78 | } 79 | }); 80 | 81 | // generate a signed json web token with the contents of user 82 | // object and return it in the response 83 | createCookieFromToken(user, 200, req, res); 84 | } catch (error) { 85 | console.log(error); 86 | Logger.error(error); 87 | if (error instanceof CustomError) { 88 | throw new CustomError(error.statusCode, error.message); 89 | } 90 | } 91 | } 92 | )(req, res, next); 93 | }; 94 | 95 | /** 96 | * Logout 97 | * @param req 98 | * @param res 99 | * @param next 100 | */ 101 | const logout = ( 102 | req: ICustomExpressRequest, 103 | res: Response, 104 | next: NextFunction 105 | ) => { 106 | try { 107 | res.clearCookie('jwt'); 108 | res.clearCookie('connect.sid'); 109 | req.session.destroy(error => { 110 | if (error) { 111 | return next(error); 112 | } 113 | return res.status(200).json({ 114 | status: 'success', 115 | message: 'You have successfully logged out', 116 | }); 117 | }); 118 | } catch (error) { 119 | Logger.error(error); 120 | if (error instanceof CustomError) { 121 | throw new CustomError(error.statusCode, error.message); 122 | } 123 | } 124 | }; 125 | 126 | /** 127 | * Recover password 128 | * @param req 129 | * @param res 130 | * @returns 131 | */ 132 | const recoverPassword = async (req: ICustomExpressRequest, res: Response) => { 133 | try { 134 | const {email} = req.body; 135 | const user = await User.findOne({email}).exec(); 136 | 137 | if (!user) { 138 | return res.status(404).json({ 139 | status: 'error', 140 | error: { 141 | status: 'error', 142 | message: 'User not found', 143 | }, 144 | }); 145 | } 146 | 147 | // Destroy session and remove any cookie 148 | req.session.destroy(() => { 149 | res.clearCookie('jwt'); 150 | }); 151 | 152 | res.clearCookie('jwt'); 153 | 154 | // Generate and set password reset token 155 | user.generatePasswordResetToken(); 156 | 157 | // Save the updated user object with a resetPasswordToken and expire 158 | await user.save(); 159 | 160 | // Send email to the user with the token 161 | const sendEmail = await sendResetPasswordToken( 162 | user.email as string, 163 | user.resetPasswordToken as string 164 | ); 165 | 166 | res.status(200).json({ 167 | status: 'success', 168 | message: `A reset email has been sent to ${user.email}.`, 169 | user: { 170 | email: user.email, 171 | token: user.resetPasswordToken, 172 | }, 173 | emailStatus: sendEmail, 174 | }); 175 | } catch (error) { 176 | Logger.error(error); 177 | if (error instanceof CustomError) { 178 | throw new CustomError(error.statusCode, error.message); 179 | } else { 180 | res.status(500).json({ 181 | status: 'error', 182 | message: 'Email could not be sent', 183 | error, 184 | }); 185 | } 186 | } 187 | }; 188 | 189 | /** 190 | * Reset password 191 | * @param req 192 | * @param res 193 | * @param next 194 | */ 195 | const resetPassword = ( 196 | req: 
ICustomExpressRequest, 197 | res: Response, 198 | next: NextFunction 199 | ) => { 200 | passport.authenticate( 201 | 'reset-password', 202 | {session: false}, 203 | async (err: Error, user: IUserMethods, info: IVerifyOptions) => { 204 | try { 205 | if (err || !user) { 206 | return res.status(400).json({ 207 | status: 'error', 208 | error: { 209 | message: info.message, 210 | }, 211 | }); 212 | } 213 | 214 | res.status(200).json({ 215 | status: 'success', 216 | message: 'Password successfully updated', 217 | }); 218 | } catch (error) { 219 | Logger.error(error); 220 | 221 | if (error instanceof CustomError) { 222 | throw new CustomError(error.statusCode, error.message); 223 | } 224 | } 225 | } 226 | )(req, res, next); 227 | }; 228 | 229 | /** 230 | * Return authenticated user 231 | * @param req 232 | * @param res 233 | * @returns 234 | */ 235 | const returnUserLogged = async (req: ICustomExpressRequest, res: Response) => { 236 | try { 237 | if (!req.currentUser) { 238 | return res.status(401).json({ 239 | status: 'error', 240 | error: { 241 | message: 242 | 'If you can see this message there is something wrong with authentication', 243 | }, 244 | }); 245 | } 246 | 247 | const user = await User.findById(req.currentUser?.id); 248 | 249 | res.status(200).json({ 250 | status: 'success', 251 | message: 'User logged retrieved', 252 | data: { 253 | user, 254 | }, 255 | }); 256 | } catch (error) { 257 | Logger.error(error); 258 | if (error instanceof CustomError) { 259 | throw new CustomError(error.statusCode, error.message); 260 | } 261 | } 262 | }; 263 | 264 | /** 265 | * Google login 266 | * @param req 267 | * @param res 268 | */ 269 | const googleLogin = async (req: ICustomExpressRequest, res: Response) => { 270 | try { 271 | const user = req.user as IUserMethods; 272 | 273 | createCookieFromToken(user, 201, req, res); 274 | } catch (error) { 275 | Logger.debug(error); 276 | if (error instanceof CustomError) { 277 | throw new CustomError(error.statusCode, error.message); 278 | } 279 | } 280 | }; 281 | 282 | export { 283 | signup, 284 | login, 285 | logout, 286 | recoverPassword, 287 | resetPassword, 288 | returnUserLogged, 289 | googleLogin, 290 | }; 291 | -------------------------------------------------------------------------------- /src/api/v1/auth/auth.route.ts: -------------------------------------------------------------------------------- 1 | import express from 'express'; 2 | import passport from '../../../config/passport.config'; 3 | import { 4 | recoverPasswordApiLimiter, 5 | resetPasswordApiLimiter, 6 | } from '../../../middlewares/apiRateLimit.middleware'; 7 | import catchAsyncHandler from '../../../middlewares/catchAsyncHandler.middleware'; 8 | import {requireAuthenticationMiddleware} from '../../../middlewares/requireAuthentication.middleware'; 9 | 10 | import { 11 | googleLogin, 12 | login, 13 | logout, 14 | recoverPassword, 15 | resetPassword, 16 | returnUserLogged, 17 | signup, 18 | } from './auth.controller'; 19 | 20 | const authRouter = express.Router(); 21 | 22 | authRouter.post('/signup', catchAsyncHandler(signup)); 23 | authRouter.post('/login', catchAsyncHandler(login)); 24 | authRouter.post('/logout', catchAsyncHandler(logout)); 25 | authRouter.post( 26 | '/recover-password', 27 | recoverPasswordApiLimiter, 28 | catchAsyncHandler(recoverPassword) 29 | ); 30 | authRouter.post( 31 | '/reset-password', 32 | resetPasswordApiLimiter, 33 | catchAsyncHandler(resetPassword) 34 | ); 35 | authRouter.get( 36 | '/me', 37 | requireAuthenticationMiddleware, 38 | 
catchAsyncHandler(returnUserLogged) 39 | ); 40 | 41 | /** 42 | * Social Authentication: Google 43 | */ 44 | authRouter.get( 45 | '/google', 46 | passport.authenticate('google', { 47 | session: false, 48 | scope: ['profile', 'email'], 49 | }) 50 | ); 51 | // callback route for Google authentication 52 | authRouter.get( 53 | '/google/callback', 54 | passport.authenticate('google', { 55 | session: false, 56 | scope: ['profile', 'email'], 57 | }), 58 | googleLogin 59 | ); 60 | 61 | export default authRouter; 62 | -------------------------------------------------------------------------------- /src/api/v1/database-logs/databaseLog.model.ts: -------------------------------------------------------------------------------- 1 | import dotenv from 'dotenv'; 2 | import mongoose, { 3 | Types, 4 | Document, 5 | HydratedDocument, 6 | Model, 7 | Schema, 8 | } from 'mongoose'; 9 | import {CustomError} from '../../../errors'; 10 | 11 | dotenv.config(); 12 | 13 | if (!process.env.JWT_KEY) { 14 | throw new CustomError( 15 | 404, 16 | 'Please provide a JWT_KEY as global environment variable' 17 | ); 18 | } 19 | 20 | export interface IDatabaseLog { 21 | _id: Types.ObjectId; 22 | type: string; 23 | date: Date; 24 | level: string; 25 | details: { 26 | channel: string; 27 | message: string; 28 | status: string; 29 | response?: Schema.Types.Mixed; 30 | }; 31 | } 32 | 33 | export interface IDatabaseLogMethods { 34 | toJSON(): Document; 35 | } 36 | 37 | export interface IDatabaseLogModel 38 | extends Model { 39 | checkExistingField: ( 40 | field: string, 41 | value: string 42 | ) => Promise>; 43 | } 44 | 45 | const DatabaseLogSchema = new Schema< 46 | IDatabaseLog, 47 | IDatabaseLogModel, 48 | IDatabaseLogMethods 49 | >( 50 | { 51 | type: {type: String, required: true}, 52 | date: {type: Date, required: true}, 53 | level: {type: String, required: true}, 54 | details: { 55 | channel: {type: String, required: true}, 56 | message: {type: String, required: true}, 57 | status: {type: String, required: true}, 58 | response: Schema.Types.Mixed, 59 | }, 60 | }, 61 | { 62 | toJSON: { 63 | virtuals: true, 64 | getters: true, 65 | }, 66 | toObject: { 67 | virtuals: true, 68 | getters: true, 69 | }, 70 | timestamps: true, 71 | } 72 | ); 73 | 74 | DatabaseLogSchema.index({ 75 | type: 1, 76 | date: 1, 77 | level: 1, 78 | 'details.channel': 1, 79 | 'details.message': 1, 80 | 'details.status': 1, 81 | }); 82 | 83 | DatabaseLogSchema.methods.toJSON = function () { 84 | const logObj = this.toObject(); 85 | logObj.id = logObj._id; // remap _id to id 86 | 87 | delete logObj._id; 88 | delete logObj.__v; 89 | return logObj; 90 | }; 91 | 92 | DatabaseLogSchema.statics.checkExistingField = async function ( 93 | field: string, 94 | value: string 95 | ) { 96 | const log = await this.findOne({[field]: value}); 97 | return log; 98 | }; 99 | 100 | const DatabaseLog = mongoose.model( 101 | 'DatabaseLog', 102 | DatabaseLogSchema, 103 | 'logs' 104 | ); 105 | 106 | export default DatabaseLog; 107 | -------------------------------------------------------------------------------- /src/api/v1/index.route.ts: -------------------------------------------------------------------------------- 1 | import express, {Response} from 'express'; 2 | import _ from 'lodash'; 3 | import {ICustomExpressRequest} from '../../middlewares/currentUser.middleware'; 4 | 5 | import appRouter from './app/app.route'; 6 | import authRouter from './auth/auth.route'; 7 | 8 | import swaggerRouter from './swagger/swagger.route'; 9 | import typedocRouter from 
'./typedoc/typedoc.route'; 10 | 11 | import { 12 | apiV1RateLimiter, 13 | devlopmentApiLimiter, 14 | } from '../../middlewares/apiRateLimit.middleware'; 15 | 16 | const apiV1Router = express.Router(); 17 | 18 | apiV1Router.get('/', (req: ICustomExpressRequest, res: Response) => { 19 | res.status(200).json({ 20 | status: 'success', 21 | message: 'Healthy check completed successfully', 22 | }); 23 | }); 24 | 25 | const defaultRoutes = [ 26 | { 27 | path: '/app', 28 | route: appRouter, 29 | }, 30 | { 31 | path: '/auth', 32 | route: authRouter, 33 | }, 34 | ]; 35 | 36 | const devRoutes = [ 37 | { 38 | path: '/documentation', 39 | route: swaggerRouter, 40 | }, 41 | { 42 | path: '/typedoc', // this route will serve typedoc generated documentation 43 | route: typedocRouter, 44 | }, 45 | ]; 46 | 47 | _.forEach(defaultRoutes, route => { 48 | apiV1Router.use(apiV1RateLimiter); 49 | apiV1Router.use(route.path, route.route); 50 | }); 51 | 52 | if (process.env.NODE_ENV === 'development') { 53 | _.forEach(devRoutes, route => { 54 | apiV1Router.use(devlopmentApiLimiter); 55 | apiV1Router.use(route.path, route.route); 56 | }); 57 | } 58 | 59 | export default apiV1Router; 60 | -------------------------------------------------------------------------------- /src/api/v1/swagger/swagger.json: -------------------------------------------------------------------------------- 1 | { 2 | "openapi": "3.0.0", 3 | "info": { 4 | "title": "Express Typescript Rest Api", 5 | "description": "Express Typescript Rest Api", 6 | "termsOfService": "https://github.com/giuseppealbrizio/express-typescript-rest-api", 7 | "contact": { 8 | "email": "g.albrizio@gmail.com" 9 | }, 10 | "license": { 11 | "name": "MIT", 12 | "url": "https://opensource.org/licenses/MIT" 13 | }, 14 | "version": "1.0.0" 15 | }, 16 | "externalDocs": { 17 | "description": "Find out more about this template", 18 | "url": "https://github.com/giuseppealbrizio" 19 | }, 20 | "servers": [ 21 | { 22 | "url": "http://localhost:3000/api/v1" 23 | }, 24 | { 25 | "url": "http://localhost:3000/api/v1" 26 | } 27 | ], 28 | "tags": [ 29 | { 30 | "name": "App", 31 | "description": "App routes" 32 | } 33 | ], 34 | "paths": { 35 | "/app": { 36 | "get": { 37 | "tags": ["App"], 38 | "summary": "App router", 39 | "operationId": "appTest", 40 | "responses": { 41 | "200": { 42 | "description": "successful operation", 43 | "content": { 44 | "application/json": { 45 | "schema": { 46 | "type": "object", 47 | "properties": { 48 | "status": { 49 | "type": "string" 50 | }, 51 | "message": { 52 | "type": "string" 53 | } 54 | } 55 | } 56 | } 57 | } 58 | }, 59 | "400": { 60 | "description": "Missing credentials", 61 | "content": {} 62 | }, 63 | "401": { 64 | "description": "Invalid token, please log in or sign up", 65 | "content": { 66 | "application/json": { 67 | "schema": { 68 | "type": "object", 69 | "properties": { 70 | "status": { 71 | "type": "string" 72 | }, 73 | "error": { 74 | "type": "object", 75 | "properties": { 76 | "message": { 77 | "type": "string" 78 | }, 79 | "trace": { 80 | "type": "object", 81 | "properties": { 82 | "statusCode": { 83 | "type": "number" 84 | } 85 | } 86 | } 87 | } 88 | } 89 | } 90 | } 91 | } 92 | } 93 | } 94 | }, 95 | "security": [ 96 | { 97 | "bearerAuth": [] 98 | } 99 | ], 100 | "x-codegen-request-body-name": "body" 101 | } 102 | } 103 | }, 104 | "components": { 105 | "schemas": { 106 | "App": { 107 | "type": "object", 108 | "properties": { 109 | "id": { 110 | "type": "integer", 111 | "format": "int64" 112 | }, 113 | "username": { 114 | "type": "string" 
115 | }, 116 | "email": { 117 | "type": "string" 118 | }, 119 | "password": { 120 | "type": "string" 121 | } 122 | }, 123 | "xml": { 124 | "name": "User" 125 | } 126 | } 127 | }, 128 | "securitySchemes": { 129 | "bearerAuth": { 130 | "type": "http", 131 | "scheme": "bearer", 132 | "bearerFormat": "JWT" 133 | } 134 | } 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /src/api/v1/swagger/swagger.route.ts: -------------------------------------------------------------------------------- 1 | import express from 'express'; 2 | import swaggerUi from 'swagger-ui-express'; 3 | import swaggerDocument from './swagger.json'; 4 | 5 | const swaggerRouter = express.Router(); 6 | 7 | const options = { 8 | explorer: true, 9 | }; 10 | 11 | swaggerRouter.use('/', swaggerUi.serve); 12 | swaggerRouter.get('/', swaggerUi.setup(swaggerDocument, options)); 13 | 14 | export default swaggerRouter; 15 | -------------------------------------------------------------------------------- /src/api/v1/typedoc/typedoc.route.ts: -------------------------------------------------------------------------------- 1 | import express from 'express'; 2 | import path from 'path'; 3 | 4 | const typedocRouter = express.Router(); 5 | 6 | typedocRouter.use(express.static(path.join(__dirname, '../../../../docs'))); 7 | 8 | typedocRouter.get('/typedoc', (req, res) => { 9 | res.sendFile(path.join(__dirname, '../../../../docs/index.html')); 10 | }); 11 | 12 | export default typedocRouter; 13 | -------------------------------------------------------------------------------- /src/api/v1/user/user.model.ts: -------------------------------------------------------------------------------- 1 | import bcrypt from 'bcryptjs'; 2 | import crypto from 'crypto'; 3 | import dotenv from 'dotenv'; 4 | import jwt from 'jsonwebtoken'; 5 | import mongoose, { 6 | HydratedDocument, 7 | Document, 8 | Model, 9 | Schema, 10 | Types, 11 | } from 'mongoose'; 12 | 13 | import validator from 'validator'; 14 | import {CustomError} from '../../../errors'; 15 | import {apiRoles} from '../../config/roles.config'; 16 | 17 | dotenv.config(); 18 | 19 | if (!process.env.JWT_KEY) { 20 | throw new CustomError( 21 | 404, 22 | 'Please provide a JWT_KEY as global environment variable' 23 | ); 24 | } 25 | 26 | const jwtKey = process.env.JWT_KEY; 27 | 28 | /** 29 | * Define the Google Passport interface 30 | */ 31 | 32 | export interface IGooglePassport { 33 | id: string; 34 | sync: boolean; 35 | tokens: { 36 | accessToken: string; 37 | refreshToken: string; 38 | }; 39 | } 40 | 41 | /** 42 | * define user messages interface 43 | */ 44 | export interface IUserMessages { 45 | title: string; 46 | body: string; 47 | type: string; 48 | read: boolean; 49 | firebaseMessageId: string; 50 | } 51 | 52 | /** 53 | * Define the User model... 
54 | */ 55 | export interface IUser { 56 | // isModified(arg0: string): unknown; 57 | _id: Types.ObjectId; 58 | username: string; 59 | fullName: string; 60 | email: string; 61 | password: string; 62 | resetPasswordToken?: string; 63 | resetPasswordExpires?: Date; 64 | google: IGooglePassport; 65 | role: string; 66 | active: boolean; 67 | pictureUrl: string; 68 | pictureBlob: string; 69 | lastLoginDate: Date; 70 | notification: { 71 | fcmPermission: string; 72 | firebaseMessageToken: string; 73 | }; 74 | messages: IUserMessages[]; 75 | featureFlags?: { 76 | [key: string]: string; 77 | }; 78 | } 79 | 80 | /** 81 | * Exporting methods for User 82 | */ 83 | export interface IUserMethods { 84 | toJSON(): Document; 85 | comparePassword(password: string): Promise; 86 | generateVerificationToken(): string; 87 | generatePasswordResetToken(): void; 88 | } 89 | 90 | /** 91 | * Create a new Model type that knows about Methods and stati and IUser... 92 | */ 93 | export interface IUserModel extends Model { 94 | checkExistingField: ( 95 | field: string, 96 | value: string 97 | ) => Promise>; 98 | } 99 | 100 | const MessageSchema = new Schema( 101 | { 102 | title: { 103 | type: String, 104 | required: true, 105 | trim: true, 106 | }, 107 | body: { 108 | type: String, 109 | required: true, 110 | trim: true, 111 | }, 112 | type: { 113 | type: String, 114 | required: true, 115 | trim: true, 116 | }, 117 | read: { 118 | type: Boolean, 119 | default: false, 120 | }, 121 | firebaseMessageId: { 122 | type: String, 123 | }, 124 | }, 125 | { 126 | toJSON: { 127 | virtuals: true, 128 | getters: true, 129 | }, 130 | toObject: { 131 | virtuals: true, 132 | getters: true, 133 | }, 134 | timestamps: true, 135 | } 136 | ); 137 | 138 | const UserSchema = new Schema( 139 | { 140 | username: { 141 | type: String, 142 | required: true, 143 | unique: true, 144 | lowercase: true, 145 | index: true, 146 | }, 147 | fullName: { 148 | type: String, 149 | }, 150 | email: { 151 | type: String, 152 | required: [true, "Email can't be blank"], 153 | unique: true, 154 | lowercase: true, 155 | index: true, 156 | // TODO: Re-enable the validation once migration is completed 157 | validate: [validator.isEmail, 'Please provide an email address'], 158 | match: [/\S+@\S+\.\S+/, 'is invalid'], 159 | trim: true, 160 | }, 161 | password: {type: String, required: true, minlength: 8}, 162 | resetPasswordToken: { 163 | type: String, 164 | required: false, 165 | }, 166 | resetPasswordExpires: { 167 | type: Date, 168 | required: false, 169 | }, 170 | google: { 171 | id: String, 172 | sync: {type: Boolean}, // authorisation to sync with google 173 | tokens: { 174 | accessToken: String, 175 | refreshToken: String, 176 | }, 177 | }, 178 | role: { 179 | type: String, 180 | enum: apiRoles, 181 | default: 'user', 182 | }, 183 | active: { 184 | type: Boolean, 185 | default: true, 186 | }, 187 | pictureUrl: { 188 | type: String, 189 | trim: true, 190 | validate: { 191 | validator: (value: string) => 192 | validator.isURL(value, { 193 | protocols: ['http', 'https', 'ftp'], 194 | require_tld: true, 195 | require_protocol: true, 196 | }), 197 | message: 'Must be a Valid URL', 198 | }, 199 | }, 200 | pictureBlob: { 201 | type: String, 202 | }, 203 | lastLoginDate: {type: Date, required: false, default: null}, 204 | notification: { 205 | fcmPermission: { 206 | type: String, 207 | enum: ['granted', 'denied', 'default'], 208 | default: 'default', 209 | }, 210 | firebaseMessageToken: {type: String, trim: true, default: null}, 211 | }, 212 | messages: 
[MessageSchema], 213 | featureFlags: { 214 | allowSendEmail: { 215 | type: String, 216 | enum: ['granted', 'denied', 'default'], 217 | default: 'granted', 218 | }, 219 | allowSendSms: { 220 | type: String, 221 | enum: ['granted', 'denied', 'default'], 222 | default: 'granted', 223 | }, 224 | betaFeatures: { 225 | type: String, 226 | enum: ['granted', 'denied', 'default'], 227 | default: 'default', 228 | }, 229 | darkMode: { 230 | type: String, 231 | enum: ['granted', 'denied', 'default'], 232 | default: 'default', 233 | }, 234 | personalization: { 235 | type: String, 236 | enum: ['granted', 'denied', 'default'], 237 | default: 'default', 238 | }, 239 | geolocationBased: { 240 | type: String, 241 | enum: ['granted', 'denied', 'default'], 242 | default: 'default', 243 | }, 244 | security: { 245 | type: String, 246 | enum: ['granted', 'denied', 'default'], 247 | default: 'default', 248 | }, 249 | payment: { 250 | type: String, 251 | enum: ['granted', 'denied', 'default'], 252 | default: 'default', 253 | }, 254 | }, 255 | }, 256 | { 257 | toJSON: { 258 | virtuals: true, 259 | getters: true, 260 | }, 261 | toObject: { 262 | virtuals: true, 263 | getters: true, 264 | }, 265 | timestamps: true, 266 | } 267 | ); 268 | 269 | UserSchema.index({username: 1, email: 1, googleId: 1}); 270 | 271 | /** 272 | * MONGOOSE MIDDLEWARE 273 | */ 274 | UserSchema.pre>( 275 | 'save', 276 | async function (next) { 277 | if (!this.isModified('password')) return next(); 278 | 279 | const salt = await bcrypt.genSalt(10); 280 | this.password = await bcrypt.hash(this.password, salt); 281 | next(); 282 | } 283 | ); 284 | 285 | /** 286 | * MONGOOSE METHODS 287 | */ 288 | UserSchema.methods.toJSON = function () { 289 | const userObj = this.toObject(); 290 | userObj.id = userObj._id; // remap _id to id 291 | 292 | delete userObj._id; 293 | delete userObj.password; 294 | delete userObj.__v; 295 | return userObj; 296 | }; 297 | 298 | UserSchema.methods.comparePassword = async function (password: string) { 299 | return bcrypt.compare(password, this.password); 300 | }; 301 | 302 | UserSchema.methods.generateVerificationToken = function () { 303 | return jwt.sign( 304 | { 305 | id: this._id, 306 | email: this.email, 307 | active: this.active, 308 | role: this.role, 309 | employeeId: this.employeeId, 310 | clientId: this.clientId, 311 | vendorId: this.vendorId, 312 | deleted: this.deleted, 313 | featureFlags: this.featureFlags, 314 | }, 315 | jwtKey, 316 | { 317 | expiresIn: '1d', 318 | // algorithm: 'RS256', 319 | } 320 | ); 321 | }; 322 | 323 | UserSchema.methods.generatePasswordResetToken = async function () { 324 | this.resetPasswordToken = crypto.randomBytes(20).toString('hex'); 325 | this.resetPasswordExpires = Date.now() + 3600000; // expires in an hour 326 | }; 327 | 328 | /** 329 | * MONGOOSE STATIC METHODS 330 | */ 331 | UserSchema.statics.checkExistingField = async function ( 332 | field: string, 333 | value: string 334 | ) { 335 | return this.findOne({[`${field}`]: value}); 336 | }; 337 | 338 | const User = mongoose.model('User', UserSchema, 'users'); 339 | 340 | export default User; 341 | -------------------------------------------------------------------------------- /src/bin/server.ts: -------------------------------------------------------------------------------- 1 | import http from 'http'; 2 | import app from '../index'; 3 | import Logger from '../lib/logger'; 4 | 5 | const port = process.env.PORT || 3000; 6 | 7 | app.set('port', port); 8 | 9 | const server = http.createServer(app); 10 | 11 | const onError = 
(error: NodeJS.ErrnoException): void => { 12 | if (error.syscall !== 'listen') { 13 | throw error; 14 | } 15 | 16 | const bind = typeof port === 'string' ? `Pipe ${port}` : `Port ${port}`; 17 | 18 | // handle specific listen errors with friendly messages 19 | switch (error.code) { 20 | case 'EACCES': 21 | console.error(`${bind} requires elevated privileges`); 22 | process.exit(1); 23 | case 'EADDRINUSE': 24 | console.error(`${bind} is already in use`); 25 | process.exit(1); 26 | default: 27 | throw error; 28 | } 29 | }; 30 | 31 | const onListening = (): void => { 32 | const addr = server.address(); 33 | const bind = typeof addr === 'string' ? `pipe ${addr}` : `port ${addr?.port}`; 34 | Logger.debug(`Listening on ${bind}`); 35 | 36 | Logger.info(`🚀 Server listening on port ${bind}`); 37 | }; 38 | 39 | server.listen(port); 40 | server.on('error', onError); 41 | server.on('listening', onListening); 42 | -------------------------------------------------------------------------------- /src/config/gcloud/README.md: -------------------------------------------------------------------------------- 1 | This folder contains all the config files to link the project to Google Cloud Platform 2 | 3 | ## **Basic Concepts** 4 | 5 | ### 1. SERVICE ACCOUNT 6 | 7 | To manage things like upload file to Cloud Storage, etc... 8 | 9 | - Go to GCP and create a new project 10 | - Go to Service Account and create one service account with a descriptive name 11 | - Assign a role you want to the service account (i.e. "Storage Admin") 12 | - Create a JSON key, download and put in `./src/config/gcloud` renaming google-application-credentials.json 13 | - In the .env file specify the path to this file like 14 | - In this way we can configure the app to be linked with the service account created 15 | 16 | ```dotenv 17 | GOOGLE_APPLICATION_CREDENTIALS='./../config/gcloud/google-application-credentials.json' 18 | ``` 19 | 20 | ### 2. OAUTH Client ID Account 21 | 22 | i.e. Used to create passport strategies with google 23 | 24 | - Go to GCP 25 | - Go to API & Services and create one OAuth Client ID account 26 | - Choose Application Type -> Web application 27 | - Name the web client (i.e. 
Dev Test - Web Client Oauth 2.0 Account) 28 | - In Authorized javascript origins put 29 | 30 | ``` 31 | Authorized JavaScript origins 32 | 33 | URIs* 34 | 35 | http://localhost:3000 36 | 37 | Authorized redirect URIs 38 | 39 | URIs* 40 | 41 | http://localhost:3000/auth/google/callback 42 | ``` 43 | 44 | - Copy the google client id and the google client secret and put them in the .env file like 45 | 46 | ```dotenv 47 | GOOGLE_CLIENT_ID='your-google-client-id' 48 | GOOGLE_CLIENT_SECRET='your-client-secret' 49 | ``` 50 | 51 | - Download the json file and rename to google-web-client-secret.json 52 | - Then if you need to import the file in a middleware like passport or something else you can do 53 | 54 | ```javascript 55 | const OAuth2Data = require('./google-web-client-secret.json'); 56 | 57 | const app = express(); 58 | 59 | const CLIENT_ID = OAuth2Data.client.id; 60 | const CLIENT_SECRET = OAuth2Data.client.secret; 61 | const REDIRECT_URL = OAuth2Data.client.redirect; 62 | 63 | const oAuth2Client = new google.auth.OAuth2( 64 | CLIENT_ID, 65 | CLIENT_SECRET, 66 | REDIRECT_URL, 67 | ); 68 | var authed = false; 69 | 70 | app.get('/', (req, res) => { 71 | if (!authed) { 72 | // Generate an OAuth URL and redirect there 73 | const url = oAuth2Client.generateAuthUrl({ 74 | access_type: 'offline', 75 | scope: 'https://www.googleapis.com/auth/gmail.readonly', 76 | }); 77 | console.log(url); 78 | res.redirect(url); 79 | } else { 80 | const gmail = google.gmail({ version: 'v1', auth: oAuth2Client }); 81 | gmail.users.labels.list( 82 | { 83 | userId: 'me', 84 | }, 85 | (err, res) => { 86 | if (err) return console.log('The API returned an error: ' + err); 87 | const labels = res.data.labels; 88 | if (labels.length) { 89 | console.log('Labels:'); 90 | labels.forEach((label) => { 91 | console.log(`- ${label.name}`); 92 | }); 93 | } else { 94 | console.log('No labels found.'); 95 | } 96 | }, 97 | ); 98 | res.send('Logged in'); 99 | } 100 | }); 101 | 102 | app.get('/auth/google/callback', function (req, res) { 103 | const code = req.query.code; 104 | if (code) { 105 | // Get an access token based on our OAuth code 106 | oAuth2Client.getToken(code, function (err, tokens) { 107 | if (err) { 108 | console.log('Error authenticating'); 109 | console.log(err); 110 | } else { 111 | console.log('Successfully authenticated'); 112 | oAuth2Client.setCredentials(tokens); 113 | authed = true; 114 | res.redirect('/'); 115 | } 116 | }); 117 | } 118 | }); 119 | ``` 120 | -------------------------------------------------------------------------------- /src/config/gcloud/google-application-credentials-example.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "service_account", 3 | "project_id": "your-project-id", 4 | "private_key_id": "your-private-key-id", 5 | "private_key": "your-private-key", 6 | "client_email": "service-account-email", 7 | "client_id": "your-client-id", 8 | "auth_uri": "https://accounts.google.com/o/oauth2/auth", 9 | "token_uri": "https://oauth2.googleapis.com/token", 10 | "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", 11 | "client_x509_cert_url": "client_x509_cert_url" 12 | } 13 | -------------------------------------------------------------------------------- /src/config/gcloud/google-web-client-secret-example.json: -------------------------------------------------------------------------------- 1 | { 2 | "web": { 3 | "client_id": "your-google-client-id", 4 | "project_id": "your-google-project-id", 5 | "auth_uri": 
"https://accounts.google.com/o/oauth2/auth", 6 | "token_uri": "https://oauth2.googleapis.com/token", 7 | "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", 8 | "client_secret": "your-google-client-secret", 9 | "redirect_uris": ["http://localhost:3000/auth/google/callback"], 10 | "javascript_origins": ["http://localhost:3000"] 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/config/mongodb.config.ts: -------------------------------------------------------------------------------- 1 | import mongoose from 'mongoose'; 2 | import Logger from '../lib/logger'; 3 | import {CustomError} from '../errors/CustomError.error'; 4 | 5 | mongoose.set('strictQuery', true); 6 | 7 | mongoose.connection.on('connected', () => { 8 | Logger.info('MongoDB connection established'); 9 | }); 10 | 11 | mongoose.connection.on('reconnected', () => { 12 | Logger.warn('MongoDB reconnected'); 13 | }); 14 | 15 | mongoose.connection.on('disconnected', () => { 16 | Logger.warn('MongoDB disconnected'); 17 | }); 18 | 19 | mongoose.connection.on('close', () => { 20 | Logger.warn('MongoDB connection closed'); 21 | }); 22 | 23 | mongoose.connection.on('error', (error: string) => { 24 | Logger.error(`🤦🏻 MongoDB ERROR: ${error}`); 25 | 26 | process.exit(1); 27 | }); 28 | 29 | export default { 30 | mongoDbProdConnection: async () => { 31 | try { 32 | await mongoose.connect(process.env.MONGO_URI); 33 | Logger.info(`Connected to db: ${mongoose.connection.name}`); 34 | } catch (error) { 35 | Logger.error(`Production - MongoDB connection error. ${error}`); 36 | if (error instanceof CustomError) { 37 | throw new CustomError(error.statusCode, error.message); 38 | } 39 | } 40 | }, 41 | mongoDBTestConnection: async () => { 42 | try { 43 | await mongoose.connect(process.env.MONGO_URI_TEST); 44 | Logger.info(`Connected to db: ${mongoose.connection.name}`); 45 | } catch (error) { 46 | Logger.error('Test - MongoDB connection error' + error); 47 | if (error instanceof CustomError) { 48 | throw new CustomError(500, error.message); 49 | } 50 | } 51 | }, 52 | }; 53 | -------------------------------------------------------------------------------- /src/config/mysql.config.ts: -------------------------------------------------------------------------------- 1 | import {createPool, Pool} from 'mysql2'; 2 | 3 | /** 4 | * If you would like to run the inserts asynchronously, you will want createPool. 5 | * Because in with createConnection, there is only 1 connection and all queries 6 | * executed on that connection are queued, and that is not really asynchronous. 7 | * (Async from node.js perspective, but the queries are executed sequentially) 8 | * @type {Pool} 9 | */ 10 | const mySqlTestConnection: Pool = createPool({ 11 | host: process.env.MYSQL_HOST_STAGE, 12 | user: process.env.MYSQL_USER_STAGE, 13 | password: process.env.MYSQL_PASSWORD_STAGE, 14 | database: process.env.MYSQL_DB_STAGE, 15 | waitForConnections: true, 16 | connectionLimit: 10, 17 | queueLimit: 0, 18 | // socketPath: 19 | // process.env.NODE_ENV !== 'production' ? '' : process.env.MYSQL_SOCKET_STAGE, 20 | }); 21 | 22 | const mySqlProdConnection: Pool = createPool({ 23 | host: process.env.MYSQL_HOST_PROD, 24 | user: process.env.MYSQL_USER_PROD, 25 | password: process.env.MYSQL_PASSWORD_PROD, 26 | database: process.env.MYSQL_DB_PROD, 27 | waitForConnections: true, 28 | connectionLimit: 10, 29 | queueLimit: 0, 30 | // socketPath: 31 | // process.env.NODE_ENV !== 'production' ? 
'' : process.env.MYSQL_SOCKET_PROD, 32 | }); 33 | 34 | // TODO: When ready uncomment this and use the prod db 35 | export const mySqlConnection = 36 | process.env.NODE_ENV !== 'production' 37 | ? mySqlTestConnection.promise() 38 | : mySqlProdConnection.promise(); 39 | 40 | /** 41 | * Example of query on pre-existing database 42 | */ 43 | 44 | // const query = `# SELECT * FROM users`; 45 | // const [rows] = await connection.execute(query, [limit]); 46 | -------------------------------------------------------------------------------- /src/config/passport.config.ts: -------------------------------------------------------------------------------- 1 | import dotenv from 'dotenv'; 2 | import passport from 'passport'; 3 | import passportLocal, {IStrategyOptionsWithRequest} from 'passport-local'; 4 | import passportGoogle from 'passport-google-oauth20'; 5 | 6 | import User, {IUser} from '../api/v1/user/user.model'; 7 | import Logger from '../lib/logger'; 8 | import {ICustomExpressRequest} from '../middlewares/currentUser.middleware'; 9 | 10 | dotenv.config(); 11 | 12 | const {GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET} = process.env; 13 | 14 | const LocalStrategy = passportLocal.Strategy; 15 | const GoogleStrategy = passportGoogle.Strategy; 16 | 17 | passport.serializeUser((user, done) => { 18 | /* Store only the id in passport req.session.passport.user */ 19 | done(null, user); 20 | }); 21 | 22 | passport.deserializeUser((id, done) => { 23 | User.findOne({_id: id}, (err: NativeError, user: IUser) => { 24 | done(null, user); 25 | }); 26 | }); 27 | 28 | const authFields: IStrategyOptionsWithRequest = { 29 | usernameField: 'email', 30 | passwordField: 'password', 31 | passReqToCallback: true, 32 | }; 33 | 34 | /** 35 | * Login strategy 36 | */ 37 | passport.use( 38 | 'login', 39 | new LocalStrategy( 40 | authFields, 41 | async (req: ICustomExpressRequest, email, password, cb) => { 42 | try { 43 | const user = await User.findOne({ 44 | $or: [{email}, {username: email.toLowerCase()}], 45 | }).exec(); 46 | 47 | if (!user || !user.password) { 48 | return cb(null, false, {message: 'User not found.'}); 49 | } 50 | 51 | const checkPassword = await user.comparePassword(password); 52 | 53 | if (!checkPassword) { 54 | return cb(null, false, {message: 'Incorrect email or password.'}); 55 | } 56 | 57 | if (!user || !user.active) { 58 | return cb(null, false, {message: 'Account is deactivated.'}); 59 | } 60 | 61 | const {active} = user; 62 | 63 | if (!active) { 64 | return cb(null, false, {message: 'Account is deactivated.'}); 65 | } 66 | 67 | user.lastLoginDate = new Date(); 68 | await user.save(); 69 | 70 | return cb(null, user, {message: 'Logged In Successfully'}); 71 | } catch (err: unknown) { 72 | if (err instanceof Error) { 73 | Logger.debug(err); 74 | return cb(null, false, {message: err.message}); 75 | } 76 | } 77 | } 78 | ) 79 | ); 80 | 81 | /** 82 | * Sign up strategy 83 | */ 84 | passport.use( 85 | 'signup', 86 | new LocalStrategy(authFields, async (req, email, password, cb) => { 87 | try { 88 | const checkEmail = await User.checkExistingField('email', email); 89 | 90 | if (checkEmail) { 91 | return cb(null, false, { 92 | message: 'Email already registered, log in instead', 93 | }); 94 | } 95 | 96 | const checkUserName = await User.checkExistingField( 97 | 'username', 98 | req.body.username 99 | ); 100 | 101 | if (checkUserName) { 102 | return cb(null, false, { 103 | message: 'Username exists, please try another', 104 | }); 105 | } 106 | 107 | const newUser = new User(); 108 | newUser.email = 
req.body.email; 109 | newUser.password = req.body.password; 110 | newUser.username = req.body.username; 111 | 112 | await newUser.save(); 113 | 114 | return cb(null, newUser); 115 | } catch (err: unknown) { 116 | if (err instanceof Error) { 117 | Logger.debug(err); 118 | return cb(null, false, {message: err.message}); 119 | } 120 | } 121 | }) 122 | ); 123 | 124 | /** 125 | * The password Reset method is with a token generated 126 | */ 127 | passport.use( 128 | 'reset-password', 129 | new LocalStrategy(authFields, async (req, email, password, cb) => { 130 | try { 131 | const {token} = await req.body; 132 | 133 | const user = await User.findOne({ 134 | resetPasswordToken: token, 135 | resetPasswordExpires: {$gt: Date.now()}, 136 | }).exec(); 137 | 138 | if (!user) { 139 | return cb(null, false, { 140 | message: 'Password reset token is invalid or has expired.', 141 | }); 142 | } 143 | 144 | user.password = password; 145 | user.resetPasswordToken = undefined; 146 | user.resetPasswordExpires = undefined; 147 | 148 | await user.save(); 149 | 150 | return cb(null, user, {message: 'Password Changed Successfully'}); 151 | } catch (err: unknown) { 152 | if (err instanceof Error) { 153 | Logger.debug(err); 154 | return cb(null, false, {message: err.message}); 155 | } 156 | } 157 | }) 158 | ); 159 | 160 | /** 161 | * Google strategy 162 | */ 163 | passport.use( 164 | 'google', 165 | new GoogleStrategy( 166 | { 167 | clientID: GOOGLE_CLIENT_ID, 168 | clientSecret: GOOGLE_CLIENT_SECRET, 169 | callbackURL: `/api/v1/${process.env.SERVICE_NAME}/auth/google/callback`, 170 | }, 171 | async (accessToken, refreshToken, profile, done) => { 172 | try { 173 | const username = profile.emails && profile?.emails[0]?.value; 174 | const email = profile.emails && profile?.emails[0]?.value; 175 | const pictureUrl = profile.photos && profile.photos[0].value; 176 | 177 | // 1. Check if user has already a Google profile and return it 178 | const googleUser = await User.findOne({ 179 | 'google.id': profile.id, 180 | }).exec(); 181 | 182 | if (googleUser) { 183 | return done(null, googleUser, {statusCode: 200}); 184 | } 185 | 186 | // 2. If user email is in the db and tries to google auth 187 | // update only with Google id and token 188 | const checkEmail = await User.checkExistingField( 189 | 'email', 190 | email 191 | ); 192 | 193 | const fieldsToUpdate = { 194 | pictureUrl, 195 | 'google.id': profile.id, 196 | 'google.sync': true, 197 | 'google.tokens.accessToken': accessToken, 198 | 'google.tokens.refreshToken': refreshToken, 199 | }; 200 | 201 | if (checkEmail) { 202 | const user = await User.findByIdAndUpdate( 203 | checkEmail._id, 204 | fieldsToUpdate, 205 | {new: true} 206 | ).exec(); 207 | 208 | return done(null, user, {statusCode: 200}); 209 | } 210 | 211 | // 3. 
If nothing before is verified create a new User 212 | const userObj = new User({ 213 | username, // the same as the email 214 | email, 215 | pictureUrl, 216 | password: accessToken, 217 | 'google.id': profile.id, 218 | 'google.sync': true, 219 | 'google.tokens.accessToken': accessToken, 220 | 'google.tokens.refreshToken': refreshToken, 221 | }); 222 | 223 | const user = await userObj.save({validateBeforeSave: false}); 224 | 225 | return done(null, user, {statusCode: 201}); 226 | } catch (err: unknown) { 227 | if (err instanceof Error) { 228 | Logger.debug(err); 229 | return done(err, false); 230 | } 231 | } 232 | } 233 | ) 234 | ); 235 | 236 | export default passport; 237 | -------------------------------------------------------------------------------- /src/errors/CustomError.error.ts: -------------------------------------------------------------------------------- 1 | export class CustomError extends Error { 2 | public statusCode: number; 3 | public message: string; 4 | 5 | constructor(statusCode: number, message: string) { 6 | super(message); 7 | Error.captureStackTrace(this, this.constructor); 8 | this.name = this.constructor.name; 9 | this.statusCode = statusCode; 10 | this.message = message; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/errors/NotAuthorized.error.ts: -------------------------------------------------------------------------------- 1 | import {CustomError} from './CustomError.error'; 2 | 3 | export class NotAuthorizedError extends CustomError { 4 | constructor(message: string) { 5 | super(401, message); 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /src/errors/NotFound.error.ts: -------------------------------------------------------------------------------- 1 | import {CustomError} from './CustomError.error'; 2 | 3 | export class NotFoundError extends CustomError { 4 | constructor(message: string) { 5 | super(404, message); 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /src/errors/index.ts: -------------------------------------------------------------------------------- 1 | export * from './CustomError.error'; 2 | export * from './NotAuthorized.error'; 3 | export * from './NotFound.error'; 4 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import dotenv from 'dotenv'; 2 | import express from 'express'; 3 | import compression from 'compression'; 4 | import helmet from 'helmet'; 5 | import xss from 'xss-clean'; 6 | import cors from 'cors'; 7 | import cookieParser from 'cookie-parser'; 8 | import mongoSanitize from 'express-mongo-sanitize'; 9 | import session from 'express-session'; 10 | import MongoStore from 'connect-mongo'; 11 | import passport from 'passport'; 12 | import {initializeApp, applicationDefault} from 'firebase-admin/app'; 13 | import mongoose from 'mongoose'; 14 | import Logger from './lib/logger'; 15 | import morganMiddleware from './middlewares/morgan.middleware'; 16 | import {currentUserMiddleware} from './middlewares/currentUser.middleware'; 17 | import errorHandleMiddleware from './middlewares/errorHandler.middleware'; 18 | import {NotFoundError} from './errors/NotFound.error'; 19 | import apiV1Router from './api/v1/index.route'; 20 | import mongoDbConfiguration from './config/mongodb.config'; 21 | // import path from 'path'; 22 | 23 | dotenv.config(); 24 | 25 
| const {mongoDBTestConnection, mongoDbProdConnection} = mongoDbConfiguration; 26 | 27 | /** 28 | * Connect to MongoDB 29 | */ 30 | if (process.env.NODE_ENV === 'development') { 31 | mongoDBTestConnection().catch(error => { 32 | Logger.error(error.message); 33 | }); 34 | } else { 35 | mongoDbProdConnection().catch(error => { 36 | Logger.error(error.message); 37 | }); 38 | } 39 | 40 | /** 41 | * Import agenda jobs 42 | */ 43 | import './jobs/agenda'; 44 | 45 | /** 46 | * Initialize Firebase Admin SDK 47 | */ 48 | initializeApp({ 49 | credential: applicationDefault(), 50 | }); 51 | 52 | /** 53 | * Initialize express app 54 | */ 55 | const app = express(); 56 | 57 | // trust proxy 58 | app.set('trust proxy', true); 59 | 60 | // logger middleware 61 | app.use(morganMiddleware); 62 | 63 | // set security HTTP headers 64 | app.use( 65 | helmet({ 66 | contentSecurityPolicy: false, 67 | crossOriginEmbedderPolicy: false, // set this false to prevent bug in new browser 68 | }) 69 | ); 70 | 71 | // parse body request 72 | app.use(express.json()); 73 | 74 | // parse urlencoded request 75 | app.use(express.urlencoded({extended: true})); 76 | 77 | // sanitize 78 | app.use(xss()); 79 | app.use(mongoSanitize()); 80 | 81 | // use GZIP compression 82 | app.use(compression()); 83 | 84 | // parse cookie 85 | app.use(cookieParser()); 86 | 87 | // Cookie policy definition 88 | const COOKIE_MAX_AGE: string | number = 89 | process.env.COOKIE_MAX_AGE || 1000 * 60 * 60 * 24; 90 | const SECRET = process.env.JWT_KEY; 91 | 92 | /** 93 | * FIX: 94 | * We reusing the mongoose connection to avoid the error: 95 | * workaround for Jest that crashes when using mongoUrl option 96 | */ 97 | const mongoStore = MongoStore.create({ 98 | client: mongoose.connection.getClient(), 99 | stringify: false, 100 | autoRemove: 'interval', 101 | autoRemoveInterval: 1, 102 | }); 103 | 104 | app.use( 105 | session({ 106 | cookie: { 107 | // secure: DEFAULT_ENV === 'production', 108 | maxAge: COOKIE_MAX_AGE, 109 | httpOnly: true, 110 | sameSite: 'lax', 111 | }, 112 | secret: SECRET, 113 | resave: false, 114 | saveUninitialized: false, 115 | /* Store session in mongodb */ 116 | store: mongoStore, 117 | unset: 'destroy', 118 | }) 119 | ); 120 | 121 | /** 122 | * currentUser middleware. It will set the current user in the request 123 | */ 124 | app.use(currentUserMiddleware); 125 | 126 | /** 127 | * Initialize Passport and pass the session to session storage of express 128 | * Strategies are called in the auth router 129 | * and in ./src/config/passport.config.ts 130 | */ 131 | app.use(passport.initialize()); 132 | app.use(passport.session()); 133 | 134 | /** 135 | * CORS configuration 136 | */ 137 | app.use( 138 | cors({ 139 | origin: process.env.CLIENT_URL || '*', // allow CORS 140 | methods: 'GET,HEAD,PUT,PATCH,POST,DELETE', 141 | credentials: true, // allow session cookie from browser to pass through 142 | }) 143 | ); 144 | 145 | /** 146 | * Headers configuration 147 | */ 148 | app.use((req, res, next) => { 149 | res.header('Access-Control-Allow-Origin', process.env.CLIENT_URL); // Update to match the domain you will make the request from 150 | res.header( 151 | 'Access-Control-Allow-Headers', 152 | 'Origin, X-Requested-With, Content-Type, Accept' 153 | ); 154 | next(); 155 | }); 156 | 157 | /** 158 | * This MIDDLEWARE is to serve the public client build and redirect everything 159 | * to the client index.html. Replace the original one with public. Move build 160 | * inside the server folder and activate also the catchall middleware. 
161 | */ 162 | // app.use( 163 | // express.static(path.join(__dirname, '../public'), { 164 | // index: 'index.html', 165 | // }) 166 | // ); 167 | 168 | /** 169 | * Routes definitions 170 | */ 171 | app.use(`/api/v1/${process.env.SERVICE_NAME}`, apiV1Router); 172 | 173 | /** 174 | * Catchall middleware. Activate to serve every route in 175 | * the public directory i.e. if we have a build of React 176 | */ 177 | // app.use((req, res) => 178 | // res.sendFile(path.resolve(path.join(__dirname, '../public/index.html'))) 179 | // ); 180 | 181 | /** 182 | * Catchall middleware. Activate to serve every route in throw an error if the route is not found 183 | */ 184 | app.all('*', () => { 185 | throw new NotFoundError('Route not found'); 186 | }); 187 | 188 | /** 189 | * Global Error handler middleware 190 | */ 191 | app.use(errorHandleMiddleware); 192 | 193 | export default app; 194 | -------------------------------------------------------------------------------- /src/jobs/agenda.ts: -------------------------------------------------------------------------------- 1 | import dotenv from 'dotenv'; 2 | import {Agenda} from '@hokify/agenda'; 3 | import Logger from '../lib/logger'; 4 | 5 | dotenv.config(); 6 | 7 | const {MONGO_URI, MONGO_URI_TEST} = process.env; 8 | 9 | interface AgendaDBOptions { 10 | address: string; 11 | collection?: string; 12 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 13 | options?: any; 14 | ensureIndex?: boolean; 15 | } 16 | 17 | const agenda = new Agenda({ 18 | db: { 19 | address: 20 | process.env.NODE_ENV === 'production' 21 | ? MONGO_URI 22 | : MONGO_URI_TEST, 23 | collection: 'agendaJobs', 24 | options: { 25 | useNewUrlParser: true, 26 | useUnifiedTopology: true, 27 | }, 28 | ensureIndex: true, 29 | // maxConcurrency: 20, 30 | } as AgendaDBOptions, 31 | }); 32 | 33 | /** 34 | * CRON JOB 35 | * @description Check if agenda is working 36 | */ 37 | agenda.define('check_agenda_status', async job => { 38 | Logger.info('Agenda is working!', job.attrs.data); 39 | }); 40 | 41 | (async function () { 42 | const dailyAgendaStatusCheck = agenda.create('check_agenda_status'); 43 | 44 | await agenda.start(); 45 | 46 | dailyAgendaStatusCheck.repeatEvery('0 8 * * 1-7', { 47 | skipImmediate: true, 48 | timezone: 'Europe/Rome', 49 | }); 50 | 51 | dailyAgendaStatusCheck.unique({jobId: 0}); 52 | 53 | await dailyAgendaStatusCheck.save(); 54 | })(); 55 | -------------------------------------------------------------------------------- /src/lib/logger.ts: -------------------------------------------------------------------------------- 1 | import winston from 'winston'; 2 | 3 | /** 4 | * Define your severity levels. 5 | * With them, You can create log files, 6 | *see or hide levels based on the running ENV. 7 | */ 8 | const levels = { 9 | error: 0, 10 | warn: 1, 11 | info: 2, 12 | http: 3, 13 | debug: 4, 14 | }; 15 | 16 | /** 17 | * This method set the current severity based on 18 | * the current NODE_ENV: show all the log levels 19 | * if the server was run in development mode; otherwise, 20 | * if it was run in production, show only warn and error messages. 21 | */ 22 | const level = () => { 23 | const env = process.env.NODE_ENV || 'development'; 24 | const isDevelopment = env === 'development'; 25 | return isDevelopment ? 'debug' : 'warn'; 26 | }; 27 | 28 | /** 29 | * Define different colors for each level. 30 | * Colors make the log message more visible, 31 | * adding the ability to focus or ignore messages. 
32 | */ 33 | const colors = { 34 | error: 'red', 35 | warn: 'yellow', 36 | info: 'green', 37 | http: 'magenta', 38 | debug: 'white', 39 | }; 40 | 41 | /** 42 | * Tell winston that you want to link the colors 43 | * defined above to the severity levels. 44 | */ 45 | winston.addColors(colors); 46 | 47 | // Chose the aspect of your log customizing the log format. 48 | const format = winston.format.combine( 49 | // Add the message timestamp with the preferred format 50 | winston.format.timestamp({format: 'YYYY-MM-DD HH:mm:ss:ms'}), 51 | /** 52 | * Tell Winston that the logs must be colored but 53 | * we bypass this global formatting colorize because generates 54 | * wrong output characters in file. Add in transports 55 | */ 56 | // winston.format.colorize({all: true}), 57 | // Define the format of the message showing the timestamp, the level and the message 58 | winston.format.printf( 59 | info => `${info.timestamp} ${info.level}: ${info.message}` 60 | ) 61 | ); 62 | 63 | /** 64 | * Define which transports the logger must use to print out messages. 65 | * In this example, we are using three different transports 66 | */ 67 | const transports = [ 68 | // Allow the use the console to print the messages 69 | new winston.transports.Console({ 70 | format: winston.format.combine( 71 | // Integration to format. Tell Winston that the console logs must be colored 72 | winston.format.colorize({all: true}) 73 | ), 74 | }), 75 | // Allow to print all the error level messages inside the error.log file 76 | new winston.transports.File({ 77 | filename: 'logs/error.log', 78 | level: 'error', 79 | }), 80 | /** 81 | * Allow to print all the error message inside the all.log file 82 | * (also the error log that are also printed inside the error.log( 83 | */ 84 | new winston.transports.File({filename: 'logs/all.log'}), 85 | ]; 86 | 87 | /** 88 | * Create the logger instance that has to be exported 89 | * and used to log messages. 
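 *
 * Typical usage (a sketch; the messages are placeholders, and the behaviour
 * follows the level() and transports configuration defined above):
 *
 * @example
 * import Logger from '../lib/logger';
 *
 * Logger.error('Something failed'); // emitted in every environment and also written to logs/error.log
 * Logger.warn('Deprecated call');   // lowest severity still emitted in production
 * Logger.info('Server started');    // emitted only when NODE_ENV=development
 * Logger.debug('Verbose details');  // emitted only when NODE_ENV=development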
90 | */ 91 | const Logger = winston.createLogger({ 92 | level: level(), 93 | levels, 94 | format, 95 | transports, 96 | }); 97 | 98 | export default Logger; 99 | -------------------------------------------------------------------------------- /src/middlewares/apiRateLimit.middleware.ts: -------------------------------------------------------------------------------- 1 | import {Response} from 'express'; 2 | import rateLimit from 'express-rate-limit'; 3 | import {ICustomExpressRequest} from './currentUser.middleware'; 4 | 5 | /** 6 | * Rate limiter for api v1 7 | * @see https://www.npmjs.com/package/express-rate-limit 8 | * @description 1000 requests per 1 minute for production 9 | */ 10 | const apiV1RateLimiter = rateLimit({ 11 | windowMs: 1 * 60 * 1000, // 1 minute 12 | max: 200, // Limit each IP to 200 requests per `window` (here, per 1 minute) 13 | standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers 14 | legacyHeaders: false, // Disable the `X-RateLimit-*` headers 15 | message: async (req: ICustomExpressRequest, res: Response) => { 16 | return res.status(429).json({ 17 | status: 'error', 18 | message: 'You have exceeded the 100 requests in 1 minute limit!', 19 | }); 20 | }, 21 | }); 22 | 23 | /** 24 | * Rate limiter for development route as typedoc and swagger 25 | * @description 1000 requests per 1 hour for development 26 | */ 27 | const devlopmentApiLimiter = rateLimit({ 28 | windowMs: 60 * 60 * 1000, // 59 minute 29 | max: 1000, // Limit each IP to 1000 requests per `window` (here, per 1 hour) 30 | standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers 31 | legacyHeaders: false, // Disable the `X-RateLimit-*` headers 32 | message: async (req: ICustomExpressRequest, res: Response) => { 33 | return res.status(429).json({ 34 | status: 'error', 35 | message: 'Too many requests, please try again in 10 minutes.', 36 | }); 37 | }, 38 | }); 39 | 40 | /** 41 | * Rate limiter for recover password 42 | */ 43 | const recoverPasswordApiLimiter = rateLimit({ 44 | windowMs: 1 * 60 * 1000, // 5 minute 45 | max: 1, // Limit each IP to 1020 requests per `window` (here, per 1 minute) 46 | standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers 47 | legacyHeaders: false, // Disable the `X-RateLimit-*` headers 48 | message: async (req: ICustomExpressRequest, res: Response) => { 49 | return res.status(429).json({ 50 | status: 'error', 51 | message: 52 | 'Too many requests to recover password, please try again in 1 minute.', 53 | }); 54 | }, 55 | }); 56 | 57 | /** 58 | * Rate limiter for reset password 59 | */ 60 | const resetPasswordApiLimiter = rateLimit({ 61 | windowMs: 1 * 60 * 1000, // 1 minute 62 | max: 10, // Limit each IP to 10 requests per `window` (here, per 1 minute) 63 | standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers 64 | legacyHeaders: false, // Disable the `X-RateLimit-*` headers 65 | message: async (req: ICustomExpressRequest, res: Response) => { 66 | return res.status(429).json({ 67 | status: 'error', 68 | message: 69 | 'Too many requests to reset password, please try again in 1 minute.', 70 | }); 71 | }, 72 | }); 73 | 74 | export { 75 | apiV1RateLimiter, 76 | devlopmentApiLimiter, 77 | recoverPasswordApiLimiter, 78 | resetPasswordApiLimiter, 79 | }; 80 | -------------------------------------------------------------------------------- /src/middlewares/catchAsyncHandler.middleware.ts: -------------------------------------------------------------------------------- 1 | import {NextFunction, 
Response} from 'express'; 2 | import {ICustomExpressRequest} from './currentUser.middleware'; 3 | 4 | /** 5 | * A function that takes a request, response, and next function as parameters. 6 | */ 7 | export default (catchAsyncHandler: Function) => 8 | async ( 9 | request: ICustomExpressRequest, 10 | response: Response, 11 | next: NextFunction 12 | ): Promise => { 13 | try { 14 | catchAsyncHandler(request, response, next); 15 | } catch (error) { 16 | return next(error); 17 | } 18 | }; 19 | -------------------------------------------------------------------------------- /src/middlewares/currentUser.middleware.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This middleware differentiate from the authenticate one 3 | * because is called after the authentication to retrieve 4 | * the jwt token stored in the cookie. This is useful to be 5 | * exported in a shared library 6 | */ 7 | import {NextFunction, Request, Response} from 'express'; 8 | import jwt from 'jsonwebtoken'; 9 | 10 | export interface ICurrentUserPayload { 11 | id: string; 12 | email: string; 13 | active: boolean; 14 | role: string; 15 | employeeId: string; 16 | clientId: string; 17 | vendorId: string; 18 | deleted: boolean; 19 | featureFlags: { 20 | allowSendEmail: string; 21 | allowSendSms: string; 22 | betaFeatures: string; 23 | darkMode: string; 24 | }; 25 | } 26 | 27 | /** 28 | * An interface representing the custom Express request object. 29 | */ 30 | export interface ICustomExpressRequest extends Request { 31 | currentUser?: ICurrentUserPayload; 32 | } 33 | 34 | // const secretOrPrivateKey = process.env.JWT_KEY; 35 | 36 | export const currentUserMiddleware = ( 37 | req: ICustomExpressRequest, 38 | res: Response, 39 | next: NextFunction 40 | ) => { 41 | if (!req.cookies?.jwt && !req.headers?.authorization) { 42 | return next(); 43 | } 44 | try { 45 | if ( 46 | req.headers.authorization && 47 | req.headers.authorization.startsWith('Bearer ') 48 | ) { 49 | const jwtFromBearer = req.headers?.authorization?.split(' '); 50 | 51 | const jwtToken = jwtFromBearer[1]; 52 | 53 | req.currentUser = jwt.verify( 54 | jwtToken, 55 | // secretOrPrivateKey, 56 | process.env.JWT_KEY 57 | ) as ICurrentUserPayload; 58 | } else if (req.cookies.jwt) { 59 | req.currentUser = jwt.verify( 60 | req.cookies.jwt, 61 | // secretOrPrivateKey, 62 | process.env.JWT_KEY 63 | ) as ICurrentUserPayload; 64 | } 65 | } catch (error) { 66 | return next(error); 67 | } 68 | return next(); 69 | }; 70 | -------------------------------------------------------------------------------- /src/middlewares/errorHandler.middleware.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This middleware is responsible for returning a json every time 3 | * an error comes in. 
We use in the index.ts as global middleware 4 | */ 5 | import dotenv from 'dotenv'; 6 | import {NextFunction, Request, Response} from 'express'; 7 | import {CustomError} from '../errors'; 8 | import Logger from '../lib/logger'; 9 | 10 | dotenv.config(); 11 | 12 | const errorHandleMiddleware = ( 13 | err: CustomError, 14 | req: Request, 15 | res: Response, 16 | next: NextFunction 17 | ) => { 18 | const isProduction = process.env.NODE_ENV === 'production'; 19 | let errorMessage = {}; 20 | 21 | if (res.headersSent) { 22 | return next(err); 23 | } 24 | 25 | if (!isProduction) { 26 | Logger.debug(err.stack); 27 | errorMessage = err; 28 | } 29 | 30 | if (err) { 31 | return res.status(err.statusCode || 500).json({ 32 | status: 'error', 33 | statusCode: err.statusCode, 34 | message: err.message, 35 | error: { 36 | message: err.message, 37 | ...(!isProduction && {trace: errorMessage}), 38 | }, 39 | }); 40 | } 41 | }; 42 | 43 | export default errorHandleMiddleware; 44 | -------------------------------------------------------------------------------- /src/middlewares/morgan.middleware.ts: -------------------------------------------------------------------------------- 1 | import morgan, {StreamOptions} from 'morgan'; 2 | 3 | import Logger from '../lib/logger'; 4 | 5 | /** 6 | * Override the stream method by telling 7 | * Morgan to use our custom logger instead of the console.log. 8 | */ 9 | const stream: StreamOptions = { 10 | // Use the http severity 11 | write: message => Logger.http(message), 12 | }; 13 | 14 | /** 15 | * 16 | * Skip all the Morgan http log if the 17 | * application is not running in development mode. 18 | * This method is not really needed here since 19 | * we already told to the logger that it should print 20 | * only warning and error messages in production. 21 | */ 22 | const skip = () => { 23 | const env = process.env.NODE_ENV || 'development'; 24 | return env !== 'development'; 25 | }; 26 | 27 | // Build the morgan middleware 28 | const morganMiddleware = morgan( 29 | /** 30 | * Define message format string (this is the default one). 31 | * The message format is made from tokens, and each token is 32 | * defined inside the Morgan library. 33 | * You can create your custom token to show what do you want from a request. 34 | */ 35 | ':method :url :status :res[content-length] - :response-time ms - :remote-addr - :user-agent - :date[iso]', 36 | /** 37 | * Options: in this case, I overwrote the stream and the skip logic. 38 | * See the methods above. 
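 *
 * If an extra field is needed in the log line, morgan can also register a
 * custom token that the format string above may reference. A small sketch
 * (the 'user' token name is illustrative and not used elsewhere in this repo;
 * req.currentUser is only populated after currentUserMiddleware has run):
 *
 * @example
 * morgan.token('user', req => (req as any).currentUser?.email ?? 'anonymous');
 * // ...then append ' - :user' to the format string passed to morgan(...) above.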
39 | */ 40 | 41 | {stream, skip} 42 | ); 43 | 44 | export default morganMiddleware; 45 | -------------------------------------------------------------------------------- /src/middlewares/requireAdminRole.middleware.ts: -------------------------------------------------------------------------------- 1 | import {ICustomExpressRequest} from './currentUser.middleware'; 2 | import {Response, NextFunction} from 'express'; 3 | import {NotAuthorizedError} from '../errors'; 4 | 5 | export const requireAdminRoleMiddleware = ( 6 | req: ICustomExpressRequest, 7 | res: Response, 8 | next: NextFunction 9 | ) => { 10 | if (req.currentUser?.role !== 'admin') { 11 | throw new NotAuthorizedError('You are not an admin!'); 12 | } 13 | next(); 14 | }; 15 | -------------------------------------------------------------------------------- /src/middlewares/requireAuthentication.middleware.ts: -------------------------------------------------------------------------------- 1 | import {Response, NextFunction} from 'express'; 2 | import {NotAuthorizedError} from '../errors'; 3 | import {ICustomExpressRequest} from './currentUser.middleware'; 4 | 5 | export const requireAuthenticationMiddleware = ( 6 | req: ICustomExpressRequest, 7 | res: Response, 8 | next: NextFunction 9 | ) => { 10 | if (!req.currentUser) { 11 | throw new NotAuthorizedError('You are not authorized! Please login!'); 12 | } 13 | next(); 14 | }; 15 | -------------------------------------------------------------------------------- /src/middlewares/verifyApiRights.middleware.ts: -------------------------------------------------------------------------------- 1 | import {NextFunction, Response} from 'express'; 2 | 3 | import {apiRolesRights} from '../api/config/roles.config'; 4 | import {NotAuthorizedError} from '../errors'; 5 | import {ICustomExpressRequest} from './currentUser.middleware'; 6 | 7 | export const verifyApiRights = 8 | (...requiredRights: Array) => 9 | (req: ICustomExpressRequest, res: Response, next: NextFunction) => { 10 | if (requiredRights?.length) { 11 | const userRights = >( 12 | apiRolesRights.get(req.currentUser?.role) 13 | ); 14 | 15 | const hasRequiredRights = requiredRights.every((requiredRight: string) => 16 | userRights?.includes(requiredRight) 17 | ); 18 | 19 | if ( 20 | !hasRequiredRights && 21 | req.params.userId !== req.currentUser?.id 22 | ) { 23 | throw new NotAuthorizedError( 24 | 'You are not authorized to use this endpoint' 25 | ); 26 | } 27 | } 28 | next(); 29 | }; 30 | -------------------------------------------------------------------------------- /src/services/email/sparkpost.service.ts: -------------------------------------------------------------------------------- 1 | import SparkPost from 'sparkpost'; 2 | import {CustomError} from '../../errors'; 3 | import Logger from '../../lib/logger'; 4 | 5 | /** 6 | * Send reset password token to user email 7 | * @param email 8 | * @param token 9 | * @returns Promise> 10 | */ 11 | const sendResetPasswordToken = async ( 12 | email: string, 13 | token: string 14 | ): Promise< 15 | SparkPost.ResultsPromise<{ 16 | total_rejected_recipients: number; 17 | total_accepted_recipients: number; 18 | id: string; 19 | }> 20 | > => { 21 | const {SPARKPOST_API_KEY, SPARKPOST_SENDER_DOMAIN} = process.env; 22 | try { 23 | const euClient = new SparkPost(SPARKPOST_API_KEY, { 24 | origin: 'https://api.eu.sparkpost.com:443', 25 | }); 26 | 27 | const transmission = { 28 | recipients: [ 29 | { 30 | address: { 31 | email, 32 | name: email, 33 | }, 34 | }, 35 | ], 36 | content: { 37 | from: { 38 
| email: `support@${SPARKPOST_SENDER_DOMAIN}`, 39 | name: 'Support Email', 40 | }, 41 | subject: 'Reset your password', 42 | reply_to: `support@${SPARKPOST_SENDER_DOMAIN}`, 43 | text: `Hello ${email}, we heard you lost your password. You can recover with this token: ${token}`, 44 | }, 45 | }; 46 | return await euClient.transmissions.send(transmission); 47 | } catch (error) { 48 | Logger.error(error); 49 | if (error instanceof CustomError) { 50 | throw new CustomError(error.statusCode, error.message); 51 | } 52 | // here we are throwing an error instead of returning it 53 | throw error; 54 | } 55 | }; 56 | 57 | export {sendResetPasswordToken}; 58 | -------------------------------------------------------------------------------- /src/services/google-pub-sub/pubsub.service.ts: -------------------------------------------------------------------------------- 1 | import {Message, PubSub, Subscription, Topic} from '@google-cloud/pubsub'; 2 | import DatabaseLog, { 3 | IDatabaseLog, 4 | } from '../../api/v1/database-logs/databaseLog.model'; 5 | import Logger from '../../lib/logger'; 6 | import {HydratedDocument} from 'mongoose'; 7 | 8 | const pubSubClient = new PubSub(); 9 | 10 | /** 11 | * declare custom payload interface 12 | */ 13 | export interface IPubSubPayload { 14 | [key: string]: T; 15 | } 16 | /** 17 | * declare custom error interface 18 | */ 19 | export interface IPubSubPublishError extends Error { 20 | statusCode: number; 21 | } 22 | 23 | export type TPubSubMessage = Message; 24 | 25 | /** 26 | * declare custom error class for PubSub publish error 27 | * We define a custom class since we want to throw a custom error with a custom status code 28 | */ 29 | class PubSubPublishError extends Error implements IPubSubPublishError { 30 | statusCode: number; 31 | 32 | constructor(message: string, statusCode: number) { 33 | super(message); 34 | this.statusCode = statusCode; 35 | } 36 | } 37 | 38 | /** 39 | * Publish message to PubSub 40 | * @param payload 41 | * @param topicName 42 | * @returns 43 | */ 44 | const publishMessageToPubSubTopic = async ( 45 | payload: IPubSubPayload, 46 | topicName: string 47 | ): Promise => { 48 | try { 49 | const dataBuffer = Buffer.from(JSON.stringify(payload)); 50 | 51 | const topic: Topic = pubSubClient.topic(topicName); 52 | 53 | if (!(await topic.exists())) { 54 | throw new PubSubPublishError(`Topic ${topicName} does not exist`, 404); 55 | } 56 | 57 | const message = { 58 | data: dataBuffer, 59 | }; 60 | 61 | const response = await topic.publishMessage(message); 62 | 63 | return response; 64 | } catch (error) { 65 | Logger.error(error); 66 | if (error instanceof PubSubPublishError) { 67 | throw error; 68 | } else { 69 | throw new PubSubPublishError( 70 | `Failed to publish message to topic ${topicName} with error: ${error}`, 71 | 404 72 | ); 73 | } 74 | } 75 | }; 76 | 77 | /** 78 | * 79 | * @param subscriptionName 80 | * @returns {Promise} 81 | */ 82 | const listenForPubSubPullSubscription = async ( 83 | subscriptionName: string, 84 | timeout: number 85 | ): Promise => { 86 | try { 87 | const subscriberOptions = { 88 | flowControl: { 89 | maxMessages: 10, 90 | }, 91 | }; 92 | 93 | const subscription: Subscription = pubSubClient.subscription( 94 | subscriptionName, 95 | subscriberOptions 96 | ); 97 | 98 | const checkSubscriptionExists = await subscription.exists(); 99 | 100 | /** 101 | * Check if subscription exists 102 | */ 103 | if (!checkSubscriptionExists[0]) { 104 | throw new PubSubPublishError( 105 | `Subscription ${subscriptionName} does not exist`, 106 
| 404 107 | ); 108 | } 109 | 110 | // Instantiate the message counter 111 | let messageCount = 0; 112 | 113 | /** 114 | * Create an event handler to handle messages 115 | * @param message 116 | */ 117 | const messageHandler = async (message: TPubSubMessage): Promise => { 118 | const data = Buffer.from(message.data).toString('utf8'); 119 | 120 | const response = JSON.parse(data); 121 | 122 | /** 123 | * Create a database log for the message retrieved from PubSub 124 | * This is jsut for testing purposes to see if the message is being received 125 | */ 126 | const databaseLog: HydratedDocument = new DatabaseLog({ 127 | type: 'pubsub-message', 128 | date: new Date(), 129 | level: 'info', 130 | details: { 131 | channel: 'pubsub', 132 | message: 'Message retried from PubSub pull subscription', 133 | status: 'SUCCESS', 134 | response: { 135 | ...response, 136 | messageId: message.id, 137 | }, 138 | }, 139 | }); 140 | 141 | await databaseLog.save(); 142 | 143 | Logger.debug(`Received message ${message.id}:`); 144 | Logger.debug(`\tData: ${message.data}`); 145 | Logger.debug(`\tAttributes: ${JSON.stringify(message.attributes)}`); 146 | messageCount += 1; 147 | 148 | message.ack(); 149 | }; 150 | 151 | subscription.on('message', messageHandler); 152 | 153 | /** 154 | * Create an error handler to handle errors 155 | * @param error 156 | */ 157 | const errorHandler = (error: Error): void => { 158 | Logger.error(`Error: ${error}`); 159 | subscription.removeListener('message', messageHandler); 160 | }; 161 | 162 | subscription.on('error', errorHandler); 163 | 164 | /** 165 | * Set the timeout to 60 seconds to close the subscriptions 166 | */ 167 | setTimeout(() => { 168 | subscription.removeListener('message', messageHandler); 169 | subscription.removeListener('error', errorHandler); 170 | Logger.warn( 171 | `Subscription: ${subscriptionName} closed after ${timeout}s - ${messageCount} message(s) received.` 172 | ); 173 | }, timeout * 1000); 174 | 175 | return `Subscription ${subscriptionName} listening for messages`; 176 | } catch (error) { 177 | Logger.error(error); 178 | if (error instanceof PubSubPublishError) { 179 | throw error; 180 | } else { 181 | throw new PubSubPublishError( 182 | `Failed to pull message from topic ${subscriptionName} with error: ${error}`, 183 | 404 184 | ); 185 | } 186 | } 187 | }; 188 | 189 | export { 190 | publishMessageToPubSubTopic, 191 | listenForPubSubPullSubscription, 192 | PubSubPublishError, 193 | }; 194 | -------------------------------------------------------------------------------- /src/services/maps/maps.service.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Client, 3 | DirectionsResponseData, 4 | Language, 5 | ResponseData, 6 | TravelMode, 7 | } from '@googlemaps/google-maps-services-js'; 8 | import Logger from '../../lib/logger'; 9 | 10 | export interface IGoogleMapsDirections { 11 | origin: string; 12 | destination: string; 13 | } 14 | 15 | const getGoogleMapsDirections = async ( 16 | origin: string, 17 | destination: string 18 | ): Promise => { 19 | try { 20 | const client = new Client(); 21 | 22 | const response = await client.directions({ 23 | params: { 24 | origin, 25 | destination, 26 | mode: 'driving', 27 | language: 'it', 28 | key: process.env.GOOGLE_MAPS_API_KEY, 29 | }, 30 | }); 31 | 32 | // if Google Maps API returns OK create an object to use with mongodb 33 | if (response.data.status === 'OK') { 34 | const direction = response.data.routes[0].legs[0]; 35 | 36 | const distanceObject = { 37 | 
status: response.data.status, 38 | error_message: response.data.error_message, 39 | distance: { 40 | text: direction.distance.text, 41 | value: direction.distance.value, 42 | }, 43 | duration: { 44 | text: direction.duration.text, 45 | value: direction.duration.value, 46 | }, 47 | start: { 48 | address: direction.start_address, 49 | location: { 50 | coordinates: [ 51 | direction.start_location.lat, 52 | direction.start_location.lng, 53 | ], 54 | }, 55 | }, 56 | end: { 57 | address: direction.end_address, 58 | location: { 59 | coordinates: [ 60 | direction.end_location.lat, 61 | direction.end_location.lng, 62 | ], 63 | }, 64 | }, 65 | }; 66 | return distanceObject; 67 | } 68 | return response.data; 69 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 70 | } catch (error: any | unknown) { 71 | /** 72 | * Google Maps API returns error in different forms 73 | * If we use a throw we can block the execution of the function 74 | * so for now we just return an object containing the error 75 | * to store into mongodb travel schema 76 | * directions returns code: error.response.status 77 | * directions returns error: error.response.data.status 78 | * directions returns error message: error.response.data.error_message 79 | */ 80 | Logger.error(error); 81 | return { 82 | geocoded_waypoints: error.response.data.geocoded_waypoints, 83 | status: error.response.status, 84 | error_message: error.response.data.error_message, 85 | }; 86 | } 87 | }; 88 | 89 | export {getGoogleMapsDirections}; 90 | -------------------------------------------------------------------------------- /src/services/messaging/firebase.service.ts: -------------------------------------------------------------------------------- 1 | import {getMessaging} from 'firebase-admin/messaging'; 2 | import {CustomError} from '../../errors'; 3 | 4 | export interface IFirebaseMessage { 5 | title: string; 6 | body: string; 7 | } 8 | 9 | /** 10 | * Send a single firebase message 11 | * @param message 12 | * @param userFirebaseToken 13 | * @returns 14 | */ 15 | const sendSingleFirebaseMessage = async ( 16 | message: IFirebaseMessage, 17 | userFirebaseToken: string 18 | ): Promise => { 19 | const {title, body} = message; 20 | 21 | const messageObject = { 22 | data: { 23 | title, 24 | body, 25 | }, 26 | token: userFirebaseToken, 27 | }; 28 | 29 | try { 30 | const response = await getMessaging().send(messageObject); 31 | 32 | return {message: 'Successfully sent message', response}; 33 | } catch (error) { 34 | if (error instanceof CustomError) { 35 | throw new CustomError(error.statusCode, error.message); 36 | } 37 | 38 | throw error; 39 | } 40 | }; 41 | 42 | /** 43 | * Send a multicast firebase message 44 | * @param message 45 | * @param usersFirebaseTokens 46 | * @returns 47 | */ 48 | const sendMulticastFirebaseMessage = async ( 49 | message: IFirebaseMessage, 50 | usersFirebaseTokens: Array 51 | ): Promise<{ 52 | status: string; 53 | message: string; 54 | response: object; 55 | failedTokens?: string[]; 56 | }> => { 57 | const {title, body} = message; 58 | 59 | const messageObject = { 60 | data: { 61 | title, 62 | body, 63 | }, 64 | tokens: usersFirebaseTokens, 65 | }; 66 | 67 | try { 68 | const response = await getMessaging().sendMulticast(messageObject); 69 | 70 | if (response.failureCount > 0 && response.successCount > 0) { 71 | const failedTokens: string[] = []; 72 | response.responses.forEach((resp, idx) => { 73 | if (!resp.success) { 74 | failedTokens.push(usersFirebaseTokens[idx]); 75 | } 76 | }); 77 | 78 | return { 79 | status: 
'incomplete', 80 | message: 'Some messages were not sent to users', 81 | response, 82 | failedTokens, 83 | }; 84 | } else if (response.successCount === 0) { 85 | return { 86 | status: 'error', 87 | message: 'Failed to send all messages to users', 88 | response, 89 | failedTokens: usersFirebaseTokens, 90 | }; 91 | } else { 92 | return { 93 | status: 'success', 94 | message: 'Successfully sent message to all users', 95 | response, 96 | failedTokens: [], 97 | }; 98 | } 99 | } catch (error) { 100 | if (error instanceof CustomError) { 101 | throw new CustomError(error.statusCode, error.message); 102 | } 103 | 104 | throw error; 105 | } 106 | }; 107 | 108 | export {sendSingleFirebaseMessage, sendMulticastFirebaseMessage}; 109 | -------------------------------------------------------------------------------- /src/services/messaging/whatsapp.service.ts: -------------------------------------------------------------------------------- 1 | import {initClient, ConversationParameter} from 'messagebird'; 2 | import Logger from '../../lib/logger'; 3 | import DatabaseLog, { 4 | IDatabaseLog, 5 | } from '../../api/v1/database-logs/databaseLog.model'; 6 | import {HydratedDocument} from 'mongoose'; 7 | 8 | const sendWhatsappMessageWithMessagebird = (toNumber: string): void => { 9 | const { 10 | MESSAGEBIRD_ACCESS_KEY, 11 | MESSAGEBIRD_WHATSAPP_CHANNEL_ID, 12 | MESSAGEBIRD_TEMPLATE_NAMESPACE_ID, 13 | MESSAGEBIRD_TEMPLATE_NAME_TEST, 14 | } = process.env; 15 | 16 | const messagebird = initClient(MESSAGEBIRD_ACCESS_KEY); 17 | 18 | const params: ConversationParameter = { 19 | to: toNumber, 20 | from: MESSAGEBIRD_WHATSAPP_CHANNEL_ID, 21 | type: 'hsm', 22 | reportUrl: 'https://your.report.url', 23 | content: { 24 | hsm: { 25 | namespace: MESSAGEBIRD_TEMPLATE_NAMESPACE_ID, 26 | templateName: MESSAGEBIRD_TEMPLATE_NAME_TEST, 27 | language: { 28 | code: 'en', 29 | policy: 'deterministic', 30 | }, 31 | components: [ 32 | { 33 | type: 'body', 34 | parameters: [{type: 'text', text: 'Variable 1'}], 35 | }, 36 | ], 37 | }, 38 | }, 39 | }; 40 | 41 | messagebird.conversations.send(params, async (err, response) => { 42 | if (err) { 43 | Logger.error(err); 44 | const databaseLog: HydratedDocument = new DatabaseLog({ 45 | type: 'message', 46 | date: new Date(), 47 | level: 'error', 48 | details: { 49 | channel: 'whatsapp', 50 | message: 'No message was sent', 51 | status: 'ERROR', 52 | response: {...err, recipient: toNumber}, 53 | }, 54 | }); 55 | 56 | await databaseLog.save(); 57 | } else { 58 | console.log('response', response); 59 | Logger.info(response); 60 | 61 | /** 62 | * Save the message to the database using the log model 63 | */ 64 | const databaseLog: HydratedDocument = new DatabaseLog({ 65 | type: 'message', 66 | date: new Date(), 67 | level: 'info', 68 | details: { 69 | channel: 'whatsapp', 70 | message: MESSAGEBIRD_TEMPLATE_NAME_TEST, 71 | status: 'SUCCESS', 72 | response: {...response, recipient: toNumber}, 73 | }, 74 | }); 75 | await databaseLog.save(); 76 | } 77 | }); 78 | }; 79 | 80 | export {sendWhatsappMessageWithMessagebird}; 81 | -------------------------------------------------------------------------------- /src/services/pdf/pdf.service.ts: -------------------------------------------------------------------------------- 1 | import axios from 'axios'; 2 | import PdfPrinter from 'pdfmake'; 3 | import {Storage} from '@google-cloud/storage'; 4 | import slugify from 'slugify'; 5 | import {format} from 'util'; 6 | 7 | import {TDocumentDefinitions} from 'pdfmake/interfaces'; 8 | import {IUploadResponse} from 
'../upload/upload.service'; 9 | 10 | /** 11 | * Define the storage bucket 12 | */ 13 | const storage = new Storage(); 14 | const bucket = storage.bucket(process.env.GOOGLE_STORAGE_BUCKET_NAME); 15 | 16 | /** 17 | * Define the interface for the pdf object 18 | */ 19 | export interface IPDFObject { 20 | key: string; 21 | } 22 | 23 | const generatePDF = async ( 24 | body: IPDFObject, 25 | directory: string 26 | ): Promise => { 27 | /** 28 | * Desctructure the body 29 | */ 30 | const {key} = body; 31 | 32 | /** 33 | * Define some constants 34 | */ 35 | const TODAY_DATE = new Intl.DateTimeFormat('it-IT').format(new Date()); 36 | const COMPANY_NAME = 'Company Name'; // replace with your own company name 37 | const COMPANY_LOGO = `https://storage.googleapis.com/${process.env.GOOGLE_STORAGE_BUCKET_NAME}/company-logo.png`; 38 | const SERVICE_FOLDER = 'express-typescript-api-rest'; // replace with your own service folder name 39 | 40 | /** 41 | * Get the logo image from the url 42 | */ 43 | const LOGO_IMAGE_URL = await axios 44 | .get(COMPANY_LOGO, {responseType: 'arraybuffer'}) 45 | .then(res => res.data); 46 | 47 | /** 48 | * return the array buffer for pdfmake 49 | */ 50 | const LOGO_IMAGE_BASE_64 = `data:image/png;base64,${Buffer.from( 51 | LOGO_IMAGE_URL 52 | ).toString('base64')}`; 53 | 54 | /** 55 | * Define the fonts 56 | */ 57 | const fonts = { 58 | Courier: { 59 | normal: 'Courier', 60 | bold: 'Courier-Bold', 61 | italics: 'Courier-Oblique', 62 | bolditalics: 'Courier-BoldOblique', 63 | }, 64 | Helvetica: { 65 | normal: 'Helvetica', 66 | bold: 'Helvetica-Bold', 67 | italics: 'Helvetica-Oblique', 68 | bolditalics: 'Helvetica-BoldOblique', 69 | }, 70 | Times: { 71 | normal: 'Times-Roman', 72 | bold: 'Times-Bold', 73 | italics: 'Times-Italic', 74 | bolditalics: 'Times-BoldItalic', 75 | }, 76 | Symbol: { 77 | normal: 'Symbol', 78 | }, 79 | ZapfDingbats: { 80 | normal: 'ZapfDingbats', 81 | }, 82 | }; 83 | 84 | // instantiate PDFMake 85 | const printer = new PdfPrinter(fonts); 86 | 87 | // set a general font size 88 | const fontSize = 12; 89 | 90 | /** 91 | * Define the document definition 92 | */ 93 | const docDefinition: TDocumentDefinitions = { 94 | info: { 95 | title: 'PDF Document', 96 | author: 'Author Name', 97 | subject: 'Subject', 98 | keywords: 'Keywords', 99 | }, 100 | header: (currentPage, pageCount, pageSize) => { 101 | return [ 102 | { 103 | text: `Header: ${new Intl.DateTimeFormat('it-IT').format( 104 | new Date() 105 | )} - ${key}`, 106 | alignment: currentPage % 2 ? 'right' : 'right', 107 | fontSize: fontSize - 4, 108 | lineHeight: 1.2, 109 | margin: [20, 20, 30, 20], 110 | }, 111 | { 112 | canvas: [ 113 | {type: 'rect', x: 170, y: 32, w: pageSize.width - 170, h: 40}, 114 | ], 115 | }, 116 | ]; 117 | }, 118 | footer: (currentPage, pageCount, pageSize) => { 119 | // you can apply any logic and return any valid pdfmake element 120 | return [ 121 | { 122 | text: 'This is a footer. 
You can apply any logic and return any valid pdfmake element', 123 | alignment: 'center', 124 | fontSize: fontSize - 6, 125 | lineHeight: 1.2, 126 | margin: [10, 10, 10, 10], 127 | }, 128 | { 129 | canvas: [ 130 | {type: 'rect', x: 170, y: 32, w: pageSize.width - 170, h: 40}, 131 | ], 132 | }, 133 | ]; 134 | }, 135 | content: [ 136 | { 137 | image: LOGO_IMAGE_BASE_64, 138 | width: 150, 139 | }, 140 | { 141 | text: `Some text here ${TODAY_DATE}`, 142 | fontSize: fontSize - 2, 143 | lineHeight: 1.3, 144 | margin: [10, 30, 10, 10], 145 | alignment: 'right', 146 | bold: true, 147 | }, 148 | ], 149 | defaultStyle: { 150 | font: 'Helvetica', 151 | }, 152 | }; 153 | 154 | // This produce a stream already, so we don't need to create a new one 155 | const pdfBuffer = printer.createPdfKitDocument(docDefinition); 156 | 157 | pdfBuffer.end(); 158 | 159 | /** 160 | * Define the file name 161 | */ 162 | const fileName = `FileName_${COMPANY_NAME.replace(/ /g, '_')}.pdf`; 163 | 164 | /** 165 | * FINALLY, RETURN THE PROMISE PASSING THE STREAM AND THE FILENAME 166 | */ 167 | return new Promise((resolve, reject) => { 168 | const blob = bucket.file( 169 | `${SERVICE_FOLDER}/${directory}/${slugify(fileName)}` 170 | ); 171 | 172 | const blobStream = pdfBuffer.pipe( 173 | blob.createWriteStream({ 174 | resumable: false, 175 | public: true, 176 | metadata: { 177 | contentType: 'application/pdf', 178 | cacheControl: 'no-store', 179 | }, 180 | }) 181 | ); 182 | 183 | blobStream 184 | .on('finish', () => { 185 | const blobName = blob.name; 186 | const publicUrl = format( 187 | `https://storage.googleapis.com/${bucket.name}/${blob.name}` 188 | ); 189 | 190 | resolve({publicUrl, blobName}); 191 | }) 192 | .on('error', error => { 193 | reject(error || 'unable to upload file'); 194 | }); 195 | }); 196 | }; 197 | 198 | export {generatePDF}; 199 | -------------------------------------------------------------------------------- /src/services/upload/upload.service.ts: -------------------------------------------------------------------------------- 1 | import {Storage} from '@google-cloud/storage'; 2 | import fs from 'fs'; 3 | import slugify from 'slugify'; 4 | import stream from 'stream'; 5 | import {format} from 'util'; 6 | import crypto from 'crypto'; 7 | import {CustomError} from '../../errors'; 8 | import Logger from '../../lib/logger'; 9 | 10 | export interface IUploadResponse { 11 | publicUrl: string; 12 | blobName: string; 13 | } 14 | 15 | const storage = new Storage(); 16 | const bucket = storage.bucket(process.env.GOOGLE_STORAGE_BUCKET_NAME); 17 | /** 18 | * This function create and upload a file to the local file system 19 | * 0. Always pass a buffer like argument otherwise will fail 20 | * 1. Takes a buffer argument 21 | * @param buffer 22 | * @param filename 23 | */ 24 | const streamBufferToLFS = async ( 25 | buffer: Buffer, 26 | filename: string 27 | ): Promise => { 28 | const file = `${filename}-${Date.now()}.xml`; 29 | fs.writeFile(file, buffer, err => { 30 | if (err) { 31 | console.log(err); 32 | } else { 33 | Logger.debug('The file was saved!'); 34 | } 35 | }); 36 | }; 37 | 38 | /** 39 | * This function upload a file directly to gcs without passing buffer. 40 | * 0. To make this work use multer memory storage middleware 41 | * 1. Only instance of a file with buffer will succeed 42 | * 2. 
Return a public url 43 | * @param file 44 | * @returns 45 | */ 46 | const uploadFileToGCS = async ( 47 | file: Express.Multer.File 48 | ): Promise => { 49 | const RANDOM_ID = Math.random().toString(36).substring(2, 15); // replace with your own id 50 | const SERVICE_FOLDER = 'express-typescript-api-rest'; // replace with your own service folder name 51 | const DIRECTORY = `uploads/${RANDOM_ID}`; // replace with your own directory name 52 | 53 | return new Promise((resolve, reject) => { 54 | const {originalname, buffer, mimetype} = file; 55 | 56 | const blob = bucket.file( 57 | `${SERVICE_FOLDER}/${DIRECTORY}/${slugify(originalname)}` 58 | ); 59 | 60 | const blobStream = blob.createWriteStream({ 61 | resumable: false, 62 | public: true, 63 | predefinedAcl: 'publicRead', 64 | metadata: { 65 | contentType: mimetype, 66 | cacheControl: 'no-store', 67 | }, 68 | }); 69 | 70 | blobStream 71 | .on('finish', () => { 72 | const blobName = blob.name; 73 | const publicUrl = format( 74 | `https://storage.googleapis.com/${bucket.name}/${blob.name}` 75 | ); 76 | resolve({publicUrl, blobName}); 77 | }) 78 | .on('error', error => { 79 | reject(error || 'unable to upload file'); 80 | }) 81 | .end(buffer); 82 | }); 83 | }; 84 | 85 | /** 86 | * This function take a pure buffer and convert to stream 87 | * 0. Always pass a buffer like argument otherwise will fail 88 | * 1. Takes a buffer argument 89 | * 2. Create a stream to store in memory 90 | * 3. Pipe the stream to Google Cloud Storage 91 | * 4. As soon as the file is recreated returns a public url 92 | * @return {Promise} 93 | * @param buffer 94 | */ 95 | const streamBufferToGCS = async (buffer: Buffer): Promise => { 96 | const RANDOM_ID = Math.random().toString(36).substring(2, 15); // replace with your own id 97 | const SERVICE_FOLDER = 'express-typescript-api-rest'; // replace with your own service folder name 98 | const DIRECTORY = `uploads/${RANDOM_ID}`; // replace with your own directory name 99 | const FILE_NAME = 'test.xml'; // replace with your own file name 100 | 101 | const dataStream = new stream.PassThrough(); 102 | 103 | dataStream.push(buffer); 104 | dataStream.push(null); 105 | 106 | return new Promise((resolve, reject) => { 107 | const blob = bucket.file(`${SERVICE_FOLDER}/${DIRECTORY}/${FILE_NAME}`); 108 | 109 | const blobStream = dataStream.pipe( 110 | blob.createWriteStream({ 111 | resumable: false, 112 | public: true, 113 | predefinedAcl: 'publicRead', 114 | metadata: { 115 | cacheControl: 'no-store', 116 | }, 117 | }) 118 | ); 119 | 120 | blobStream 121 | .on('finish', () => { 122 | const publicUrl = format( 123 | `https://storage.googleapis.com/${bucket.name}/${blob.name}` 124 | ); 125 | resolve({publicUrl, blobName: blob.name}); 126 | }) 127 | .on('error', error => { 128 | reject(error); 129 | }); 130 | }); 131 | }; 132 | 133 | /** 134 | * This function take an object that also contain a buffer 135 | * 0. Always pass an object that contains buffer otherwise will fail 136 | * 1. Takes also a directory like argument 137 | * 2. Create a stream to store in memory 138 | * 3. Pipe the stream to Google Cloud Storage 139 | * 4. 
As soon as the file is recreated returns a public url 140 | * @return {Promise} 141 | * @param file 142 | * @param {string} directory 143 | */ 144 | const streamFileToGCS = async ( 145 | file: Express.Multer.File, 146 | directory: string 147 | ): Promise => { 148 | const SERVICE_FOLDER = 'express-typescript-api-rest'; // replace with your own service folder name 149 | 150 | // destructuring data file object 151 | const {originalname, buffer, mimetype} = file; 152 | 153 | // generate a random uuid to avoid duplicate file name 154 | const uuid = crypto.randomBytes(4).toString('hex'); 155 | 156 | // generate a file name 157 | const fileName = `${uuid} - ${originalname.replace(/ /g, '_')}`; 158 | 159 | // Instantiate a stream to read the file buffer 160 | const dataStream = new stream.PassThrough(); 161 | 162 | dataStream.push(buffer); 163 | dataStream.push(null); 164 | 165 | return new Promise((resolve, reject) => { 166 | const blob = bucket.file( 167 | `${SERVICE_FOLDER}/${directory}/${slugify(fileName || uuid)}` 168 | ); 169 | 170 | const blobStream = dataStream.pipe( 171 | blob.createWriteStream({ 172 | resumable: false, 173 | public: true, 174 | predefinedAcl: 'publicRead', 175 | metadata: { 176 | contentType: mimetype, 177 | cacheControl: 'no-store', 178 | }, 179 | }) 180 | ); 181 | 182 | blobStream 183 | .on('finish', () => { 184 | const blobName = blob.name; 185 | const publicUrl = format( 186 | `https://storage.googleapis.com/${bucket.name}/${blob.name}` 187 | ); 188 | resolve({publicUrl, blobName}); 189 | }) 190 | .on('error', error => { 191 | reject(error); 192 | }); 193 | }); 194 | }; 195 | 196 | /** 197 | * 198 | * @param blobName 199 | * @returns 200 | */ 201 | const deleteFileFromGCS = async (blobName: string): Promise => { 202 | try { 203 | await bucket.file(blobName).delete(); 204 | } catch (e) { 205 | Logger.error(e); 206 | // console.log(e.toString()); 207 | if (e instanceof CustomError) { 208 | throw new CustomError( 209 | 404, 210 | `Failed to delete file ${blobName}: ${e.message}` 211 | ); 212 | } else { 213 | throw new Error(`Failed to delete file ${blobName}`); 214 | } 215 | } 216 | }; 217 | 218 | export { 219 | streamBufferToLFS, 220 | uploadFileToGCS, 221 | streamBufferToGCS, 222 | streamFileToGCS, 223 | deleteFileFromGCS, 224 | }; 225 | -------------------------------------------------------------------------------- /src/services/xml/xml.service.ts: -------------------------------------------------------------------------------- 1 | import {create} from 'xmlbuilder2'; 2 | import stream from 'stream'; 3 | import {Storage} from '@google-cloud/storage'; 4 | import crypto from 'crypto'; 5 | import slugify from 'slugify'; 6 | import {IUploadResponse} from '../upload/upload.service'; 7 | 8 | export interface IXMLObject { 9 | key: string; 10 | } 11 | 12 | const storage = new Storage(); 13 | const bucket = storage.bucket(process.env.GOOGLE_STORAGE_BUCKET_NAME); 14 | 15 | const generateXML = async (body: IXMLObject): Promise => { 16 | const SERVICE_FOLDER = 'express-typescript-api-rest'; 17 | const DIRECTORY = 'xml'; 18 | const UUID = crypto.randomBytes(4).toString('hex'); 19 | 20 | const {key} = body; 21 | 22 | const doc = create( 23 | {version: '1.0', encoding: 'UTF-8'}, 24 | { 25 | // '?': 'xml-stylesheet type="text/xsl" href="https://storage.googleapis.com/your-bucket/assets/xml/stylesheet.xsl"', 26 | 'p:MainXmlSubject': { 27 | '@': { 28 | 'xmlns:ds': 'http://www.w3.org/2000/09/xmldsig#', 29 | 'xmlns:p': 30 | 
'http://ivaservizi.agenziaentrate.gov.it/docs/xsd/fatture/v1.2', 31 | 'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance', 32 | versione: 'FPR12', 33 | }, 34 | Header: { 35 | SubHeader: { 36 | Key: { 37 | Value: 'value', 38 | }, 39 | }, 40 | }, 41 | Body: { 42 | SubBody: { 43 | Key: { 44 | Value: 'value', 45 | }, 46 | }, 47 | }, 48 | }, 49 | } 50 | ).doc(); 51 | 52 | const xmlBuffer = doc.end({headless: true, prettyPrint: true}); 53 | 54 | const dataStreama = new stream.PassThrough(); 55 | 56 | dataStreama.push(xmlBuffer); 57 | dataStreama.push(null); 58 | 59 | const fileName = `IT09568521000_${UUID}_${key}.xml`; 60 | 61 | return new Promise((resolve, reject) => { 62 | const blob = bucket.file( 63 | `${SERVICE_FOLDER}/${DIRECTORY}/${slugify(fileName)}.xml` 64 | ); 65 | 66 | const blobStream = dataStreama.pipe( 67 | blob.createWriteStream({ 68 | resumable: false, 69 | public: true, 70 | predefinedAcl: 'publicRead', 71 | metadata: { 72 | cacheControl: 'no-store', 73 | contentType: 'application/xml', 74 | }, 75 | }) 76 | ); 77 | 78 | blobStream 79 | .on('finish', () => { 80 | const blobName = blob.name; 81 | const publicUrl = `https://storage.googleapis.com/${bucket.name}/${blob.name}`; 82 | resolve({publicUrl, blobName}); 83 | }) 84 | .on('error', error => { 85 | reject(error); 86 | }); 87 | }); 88 | }; 89 | 90 | export {generateXML}; 91 | -------------------------------------------------------------------------------- /src/tests/index.test.ts: -------------------------------------------------------------------------------- 1 | import mongoose from 'mongoose'; 2 | // eslint-disable-next-line node/no-unpublished-import 3 | import request from 'supertest'; 4 | import app from '../index'; 5 | 6 | /** 7 | * Test to see if the server is running 8 | */ 9 | describe(`GET /api/v1/${process.env.SERVICE_NAME}`, () => { 10 | test('should return 200 OK', async () => { 11 | const res = await request(app).get(`/api/v1/${process.env.SERVICE_NAME}`); 12 | expect(res.statusCode).toEqual(200); 13 | }); 14 | afterAll(done => { 15 | // Closing the DB connection allows Jest to exit successfully. 
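// Note (sketch, assuming a recent Mongoose version): connection.close()
// returns a Promise when called without a callback, so an equivalent
// alternative is `afterAll(async () => { await mongoose.connection.close(); })`
// with no `done` callback.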
16 | mongoose.connection.close(); 17 | done(); 18 | }); 19 | }); 20 | -------------------------------------------------------------------------------- /src/tests/utils/dates.utils.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | formatDateToITLocale, 3 | generateDateRangeArray, 4 | getDaysCountBetweenDates, 5 | getFormattedDate, 6 | getMonthDaysCount, 7 | getMonthsCountBetweenDates, 8 | isDateToday, 9 | } from '../../utils/dates.utils'; 10 | 11 | describe('Date Utilities Tests Suite', () => { 12 | describe('generateDateRangeArray', () => { 13 | it('should return an empty array when no start or end date is provided', () => { 14 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment 15 | // @ts-ignore 16 | const result = generateDateRangeArray(null, null); 17 | expect(result).toEqual([]); 18 | }); 19 | 20 | it('should generate an array of dates between the start and end dates', () => { 21 | const startDate = new Date('2023-02-01'); 22 | const endDate = new Date('2023-02-05'); 23 | const result = generateDateRangeArray(startDate, endDate); 24 | expect(result.length).toEqual(5); 25 | }); 26 | }); 27 | 28 | describe('getMonthDaysCount', () => { 29 | it('should return the correct number of days in the specified month', () => { 30 | const result = getMonthDaysCount(2, 2023); 31 | expect(result).toEqual(28); 32 | }); 33 | }); 34 | 35 | describe('getDaysCountBetweenDates', () => { 36 | it('should return the correct number of days between two dates', () => { 37 | const startDate = new Date('2023-02-01'); 38 | const endDate = new Date('2023-02-05'); 39 | const result = getDaysCountBetweenDates(startDate, endDate); 40 | expect(result).toEqual(4); 41 | }); 42 | }); 43 | 44 | describe('getMonthsCountBetweenDates', () => { 45 | it('should return the correct number of months between two dates', () => { 46 | const startDate = new Date('2022-12-01'); 47 | const endDate = new Date('2023-02-01'); 48 | const result = getMonthsCountBetweenDates(startDate, endDate); 49 | expect(result).toEqual(2); 50 | }); 51 | }); 52 | 53 | describe('formatDateToITLocale', () => { 54 | it('should return a formatted date in Italian locale', () => { 55 | const date = new Date('2023-02-20'); 56 | const result = formatDateToITLocale(date); 57 | expect(result).toEqual('20/02/2023'); 58 | }); 59 | }); 60 | 61 | describe('isDateToday', () => { 62 | it('should return true when the date is today', () => { 63 | const date = new Date(); 64 | const result = isDateToday(date); 65 | expect(result).toEqual(true); 66 | }); 67 | 68 | it('should return false when the date is not today', () => { 69 | const today = new Date(); 70 | const nonTodayDate = new Date( 71 | today.getFullYear(), 72 | today.getMonth(), 73 | today.getDate() - 1 74 | ); 75 | const result = isDateToday(nonTodayDate); 76 | expect(result).toEqual(false); 77 | }); 78 | }); 79 | 80 | describe('getFormattedDate', () => { 81 | it('should return an empty string if the input date is not valid', () => { 82 | const invalidDate = new Date('invalid'); 83 | const result = getFormattedDate(invalidDate); 84 | expect(result).toEqual(''); 85 | }); 86 | 87 | it('should return a formatted date in the specified format', () => { 88 | const date = new Date('2023-02-20'); 89 | const result = getFormattedDate(date, 'dd/MM/yyyy'); 90 | expect(result).toEqual('20/02/2023'); 91 | }); 92 | 93 | it('should return a formatted date in the default format if no format is specified', () => { 94 | const date = new Date('2023-02-20'); 95 | const 
result = getFormattedDate(date); 96 | expect(result).toEqual('2023-02-20'); 97 | }); 98 | }); 99 | }); 100 | -------------------------------------------------------------------------------- /src/tests/utils/generators.utils.test.ts: -------------------------------------------------------------------------------- 1 | // eslint-disable-next-line node/no-unpublished-import 2 | import request from 'supertest'; 3 | import jwt from 'jsonwebtoken'; 4 | import express from 'express'; 5 | import {ICustomExpressRequest} from '../../middlewares/currentUser.middleware'; 6 | import { 7 | generateOTP, 8 | generateCookie, 9 | generateJsonWebToken, 10 | JwtPayload, 11 | } from '../../../src/utils/generators.utils'; 12 | 13 | // Mock process.env.JWT_KEY with a string value 14 | process.env.JWT_KEY = 'mock_jwt_key'; 15 | 16 | describe('Generators Utilities Tests Suite', () => { 17 | describe('generateOTP', () => { 18 | it('should generate a 6 digit OTP', () => { 19 | const otp = generateOTP(); 20 | expect(otp).toHaveLength(6); 21 | }); 22 | }); 23 | 24 | describe('generateJsonWebToken', () => { 25 | it('should generate a JWT token', () => { 26 | const payload = {id: 1, username: 'user1'}; 27 | const token = generateJsonWebToken(payload); 28 | const decoded = jwt.verify(token, process.env.JWT_KEY!) as JwtPayload< 29 | typeof payload 30 | >; 31 | expect(decoded.payload).toEqual(payload); 32 | }); 33 | }); 34 | 35 | describe('generateCookie', () => { 36 | it('should set a cookie with the given name and token', async () => { 37 | const app = express(); 38 | const cookieName = 'my-cookie'; 39 | const token = 'my-token'; 40 | 41 | app.get('/set-cookie', (req, res) => { 42 | generateCookie(cookieName, token, req as ICustomExpressRequest, res); 43 | res.status(200).send('Cookie set'); 44 | }); 45 | 46 | const response = await request(app).get('/set-cookie'); 47 | 48 | expect(response.status).toBe(200); 49 | expect(response.header['set-cookie']).toBeDefined(); 50 | expect(response.header['set-cookie'][0]).toContain(cookieName); 51 | expect(response.header['set-cookie'][0]).toContain(token); 52 | }); 53 | }); 54 | }); 55 | -------------------------------------------------------------------------------- /src/tests/utils/objects.utils.test.ts: -------------------------------------------------------------------------------- 1 | import {cleanObject} from '../../utils/objects.utils'; 2 | 3 | describe('cleanObject', () => { 4 | it('should remove null and undefined values from an object and its nested objects', () => { 5 | const input = { 6 | a: 1, 7 | b: null, 8 | c: { 9 | d: 'hello', 10 | e: null, 11 | f: { 12 | g: 2, 13 | h: undefined, 14 | }, 15 | }, 16 | d: [ 17 | { 18 | a: 1, 19 | b: [null, undefined, 1, 2, 3], 20 | }, 21 | { 22 | b: null, 23 | }, 24 | ], 25 | }; 26 | const expectedOutput = { 27 | a: 1, 28 | c: { 29 | d: 'hello', 30 | f: { 31 | g: 2, 32 | }, 33 | }, 34 | d: [ 35 | { 36 | a: 1, 37 | b: [1, 2, 3], 38 | }, 39 | ], 40 | }; 41 | expect(cleanObject(input)).toEqual(expectedOutput); 42 | }); 43 | }); 44 | -------------------------------------------------------------------------------- /src/types/xss-clean.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'xss-clean' { 2 | const value: Function; 3 | 4 | export default value; 5 | } 6 | -------------------------------------------------------------------------------- /src/utils/createCookieFromToken.utils.ts: -------------------------------------------------------------------------------- 1 | import 
{Response} from 'express'; 2 | import {IUserMethods} from '../api/v1/user/user.model'; 3 | import {ICustomExpressRequest} from '../middlewares/currentUser.middleware'; 4 | 5 | /** 6 | * 7 | * This function returns a json with user data, 8 | * token and the status and set a cookie with 9 | * the name jwt. We use this in the response 10 | * of login or signup 11 | * @param user: 12 | * @param statusCode 13 | * @param req 14 | * @param res 15 | */ 16 | const createCookieFromToken = ( 17 | user: IUserMethods, 18 | statusCode: number, 19 | req: ICustomExpressRequest, 20 | res: Response 21 | ) => { 22 | const token = user.generateVerificationToken(); 23 | 24 | const cookieOptions = { 25 | expires: new Date(Date.now() + 24 * 60 * 60 * 1000), 26 | httpOnly: true, 27 | secure: req.secure || req.headers['x-forwarded-proto'] === 'https', 28 | }; 29 | 30 | res.cookie('jwt', token, cookieOptions); 31 | 32 | res.status(statusCode).json({ 33 | status: 'success', 34 | token, 35 | token_expires: cookieOptions.expires, 36 | data: { 37 | user, 38 | }, 39 | }); 40 | }; 41 | 42 | export default createCookieFromToken; 43 | -------------------------------------------------------------------------------- /src/utils/dates.utils.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Generates an array of dates between the start and end dates 3 | * @param startDate 4 | * @param endDate 5 | * @returns 6 | */ 7 | const generateDateRangeArray = (startDate: Date, endDate: Date) => { 8 | let dates: Date[] = []; 9 | 10 | if (!startDate || !endDate) { 11 | return dates; 12 | } 13 | 14 | // to avoid modifying the original date 15 | const currentDate = new Date(startDate); 16 | 17 | while (currentDate < new Date(endDate)) { 18 | dates = [...dates, new Date(currentDate)]; 19 | currentDate.setDate(currentDate.getDate() + 1); 20 | } 21 | dates = [...dates, new Date(endDate)]; 22 | return dates; 23 | }; 24 | 25 | /** 26 | * Returns the number of days in a month 27 | * @param month 28 | * @param year 29 | * @returns 30 | */ 31 | const getMonthDaysCount = (month: number, year: number): number => { 32 | return new Date(year, month, 0).getDate(); 33 | }; 34 | 35 | /** 36 | * Returns the number of days between two dates 37 | * @param startDateObj 38 | * @param endDateObj 39 | * @returns 40 | */ 41 | const getDaysCountBetweenDates = (startDateObj: Date, endDateObj: Date) => { 42 | const MILLISECONDS_PER_DAY = 24 * 60 * 60 * 1000; 43 | const startDate = new Date(startDateObj).setHours(0, 0, 0, 0); 44 | const endDate = new Date(endDateObj).setHours(0, 0, 0, 0); 45 | 46 | const timeDiff = Math.abs(startDate - endDate); 47 | const daysDiff = Math.ceil(timeDiff / MILLISECONDS_PER_DAY); 48 | 49 | return daysDiff; 50 | }; 51 | 52 | /** 53 | * Returns the number of months between two dates 54 | * @param date1 55 | * @param date2 56 | * @returns 57 | */ 58 | const getMonthsCountBetweenDates = (startDateObj: Date, endDateObj: Date) => { 59 | const startDate = new Date(startDateObj); 60 | const endDate = new Date(endDateObj); 61 | 62 | const startYear = startDate.getFullYear(); 63 | const startMonth = startDate.getMonth(); 64 | 65 | const endYear = endDate.getFullYear(); 66 | const endMonth = endDate.getMonth(); 67 | 68 | const monthsDiff = (endYear - startYear) * 12 + (endMonth - startMonth); 69 | 70 | return Math.abs(monthsDiff); 71 | }; 72 | 73 | /** 74 | * Returns the number of months between two dates 75 | * @param date 76 | * @returns 77 | */ 78 | const formatDateToITLocale = (date: Date) => { 79 
| return new Intl.DateTimeFormat('it-IT', { 80 | year: 'numeric', 81 | month: '2-digit', 82 | day: '2-digit', 83 | }).format(date); 84 | }; 85 | 86 | /** 87 | * Checks if a date is today 88 | * @param date 89 | * @returns 90 | */ 91 | const isDateToday = (date: Date) => { 92 | const today = new Date(); 93 | return ( 94 | date.getDate() === today.getDate() && 95 | date.getMonth() === today.getMonth() && 96 | date.getFullYear() === today.getFullYear() 97 | ); 98 | }; 99 | 100 | const getFormattedDate = (date: Date, format = 'yyyy-MM-dd') => { 101 | if (isNaN(date.getTime())) { 102 | return ''; 103 | } 104 | 105 | const dateFormatter = new Intl.DateTimeFormat('en-US', { 106 | year: 'numeric', 107 | month: '2-digit', 108 | day: '2-digit', 109 | }); 110 | 111 | const formattedDate = dateFormatter.format(date); 112 | 113 | const formattedDateParts = formattedDate.split('/'); 114 | const year = formattedDateParts[2]; 115 | const month = formattedDateParts[0]; 116 | const day = formattedDateParts[1]; 117 | 118 | return format 119 | .replace(/yyyy/g, year) 120 | .replace(/MM/g, month) 121 | .replace(/dd/g, day); 122 | }; 123 | 124 | export { 125 | generateDateRangeArray, 126 | getMonthDaysCount, 127 | getDaysCountBetweenDates, 128 | getMonthsCountBetweenDates, 129 | formatDateToITLocale, 130 | isDateToday, 131 | getFormattedDate, 132 | }; 133 | -------------------------------------------------------------------------------- /src/utils/generators.utils.ts: -------------------------------------------------------------------------------- 1 | import crypto from 'crypto'; 2 | import {ICustomExpressRequest} from '../middlewares/currentUser.middleware'; 3 | import {Response} from 'express'; 4 | import jwt from 'jsonwebtoken'; 5 | 6 | export interface JwtPayload { 7 | [key: string]: T; 8 | } 9 | /** 10 | * Generate a json web token 11 | * @param payload 12 | * @returns 13 | */ 14 | const generateJsonWebToken = (payload: JwtPayload): string => { 15 | const jwtKey = process.env.JWT_KEY; 16 | 17 | if (!jwtKey) { 18 | throw new Error('Missing JWT'); 19 | } 20 | 21 | return jwt.sign({payload}, jwtKey, { 22 | expiresIn: '10d', 23 | // algorithm: 'RS256', 24 | }); 25 | }; 26 | 27 | /** 28 | * Generate a cookie with a token 29 | * @param cookieName 30 | * @param token 31 | * @param req 32 | * @param res 33 | */ 34 | const generateCookie = ( 35 | cookieName: string, 36 | token: string, 37 | req: ICustomExpressRequest, 38 | res: Response 39 | ) => { 40 | const cookieOptions = { 41 | expires: new Date(Date.now() + 10 * 24 * 60 * 60 * 1000), 42 | httpOnly: true, 43 | secure: req.secure || req.headers['x-forwarded-proto'] === 'https', 44 | }; 45 | 46 | res.cookie(cookieName, token, cookieOptions); 47 | }; 48 | 49 | /** 50 | * Generate a random OTP 51 | * @returns 52 | */ 53 | const generateOTP = (): string => { 54 | const chars = '0123456789'; 55 | let otp = ''; 56 | 57 | while (otp.length < 6) { 58 | const randomBytes = crypto.randomBytes(4); 59 | const randomIndex = randomBytes.readUInt32BE(0) % chars.length; 60 | otp += chars.charAt(randomIndex); 61 | } 62 | 63 | return otp; 64 | }; 65 | 66 | export {generateOTP, generateCookie, generateJsonWebToken}; 67 | -------------------------------------------------------------------------------- /src/utils/objects.utils.ts: -------------------------------------------------------------------------------- 1 | import cleanDeep from 'clean-deep'; 2 | 3 | interface IObjectWithNulls { 4 | [key: string]: unknown | null | IObjectWithNulls; 5 | } 6 | 7 | const cleanObject = (obj: 
IObjectWithNulls): IObjectWithNulls => { 8 | return cleanDeep(obj); 9 | }; 10 | 11 | export {cleanObject}; 12 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/gts/tsconfig-google.json", 3 | "compilerOptions": { 4 | "rootDir": ".", 5 | "outDir": "build", 6 | "esModuleInterop": true, 7 | "experimentalDecorators": true, 8 | "emitDecoratorMetadata": true, 9 | "resolveJsonModule": true, 10 | "noImplicitReturns": false, 11 | "paths": { 12 | "*": ["./node_modules/*", "./src/types/*"] 13 | } 14 | }, 15 | "include": ["src/**/*.ts", "test/**/*.ts"], 16 | "exclude": ["node_modules", "build", "docs", "**/*.test.ts"] 17 | } 18 | -------------------------------------------------------------------------------- /typedoc.json: -------------------------------------------------------------------------------- 1 | { 2 | // Comments are supported, like tsconfig.json 3 | "entryPoints": [ 4 | "src/index.ts", 5 | "src/api/v1/app/app.controller.ts", 6 | "src/middlewares/currentUser.middleware.ts" 7 | ], 8 | "exclude": ["**/node_modules/**", "**/*.spec.ts", "**/*.test.ts", "dist"], 9 | 10 | "out": "docs" 11 | } 12 | --------------------------------------------------------------------------------
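Usage sketch (illustrative, not a file in this repository): the snippet below shows one way the upload service above could be mounted behind an Express route using multer's memory storage, which the comments in src/services/upload/upload.service.ts call for. Only uploadFileToGCS and its IUploadResponse shape come from the repository; the route path, field name, import path and error responses are assumptions for illustration, and multer plus @types/multer are assumed to be installed (as the Express.Multer.File signature implies).

import express from 'express';
import multer from 'multer';
import {uploadFileToGCS} from '../services/upload/upload.service';

const app = express();

// multer.memoryStorage() keeps the uploaded file in memory as a Buffer,
// which is what uploadFileToGCS expects: it reads originalname, buffer
// and mimetype from the Express.Multer.File object.
const upload = multer({storage: multer.memoryStorage()});

// Hypothetical route: accept a single file under the "file" field and
// respond with the public URL returned by the service.
app.post('/api/v1/uploads', upload.single('file'), async (req, res) => {
  if (!req.file) {
    return res.status(400).json({message: 'No file provided'});
  }

  try {
    const {publicUrl, blobName} = await uploadFileToGCS(req.file);
    return res.status(201).json({publicUrl, blobName});
  } catch (error) {
    return res.status(500).json({message: 'Upload failed'});
  }
});

Because memory storage holds each upload in RAM rather than on disk, adding a fileSize limit to the multer options would be a sensible safeguard for larger files.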