├── packages ├── auth │ └── .gitkeep ├── docs │ ├── .gitignore │ ├── src │ │ ├── biz │ │ │ └── README.md │ │ ├── changelog │ │ │ └── README.md │ │ ├── .vuepress │ │ │ ├── styles │ │ │ │ └── palette.styl │ │ │ └── config.js │ │ ├── README.md │ │ └── dev │ │ │ ├── README.md │ │ │ ├── source-code.md │ │ │ ├── unit-test.md │ │ │ └── prerequisite.md │ ├── .dockerignore │ ├── Dockerfile │ └── package.json ├── backend │ ├── .vscode │ │ ├── settings.json │ │ └── extensions.json │ ├── .prettierignore │ ├── .gitignore │ ├── shared │ │ ├── core │ │ │ ├── index.ts │ │ │ ├── base.svc.ts │ │ │ ├── prisma.svc.ts │ │ │ └── graphql.svc.ts │ │ ├── prisma │ │ │ ├── migrations │ │ │ │ ├── migration_lock.toml │ │ │ │ └── 20211001002439_init │ │ │ │ │ └── migration.sql │ │ │ └── schema.prisma │ │ ├── @types │ │ │ ├── prisma │ │ │ │ └── index.d.ts │ │ │ ├── moleculer │ │ │ │ └── index.d.ts │ │ │ └── index.d.ts │ │ ├── utils │ │ │ ├── cors.ts │ │ │ ├── prisma.ts │ │ │ ├── db.ts │ │ │ └── graphql.ts │ │ ├── middlewares │ │ │ └── env.middleware.ts │ │ ├── mixins │ │ │ ├── config.mixin.ts │ │ │ ├── firebase.mixin.ts │ │ │ ├── apollo-server.mixin.ts │ │ │ ├── prisma.mixin.ts │ │ │ └── graphql.mixin.ts │ │ └── configs │ │ │ └── moleculer.config.ts │ ├── services │ │ ├── core │ │ │ ├── package.json │ │ │ └── config.svc.ts │ │ ├── interaction │ │ │ ├── package.json │ │ │ └── mail.svc.ts │ │ ├── profile │ │ │ ├── package.json │ │ │ ├── profile-db.svc.ts │ │ │ └── profile-gql.svc.ts │ │ ├── storage │ │ │ ├── package.json │ │ │ └── s3.svc.ts │ │ ├── firebase │ │ │ ├── package.json │ │ │ └── firebase.svc.ts │ │ └── api-gateway │ │ │ ├── package.json │ │ │ ├── api.svc.ts │ │ │ └── routes.ts │ ├── .prettierrc │ ├── .editorconfig │ ├── tools │ │ ├── firebase │ │ │ ├── index.html │ │ │ └── app.js │ │ ├── create-env.js │ │ ├── prisma-env.js │ │ ├── login-dev.js │ │ └── generate-graphql.ts │ ├── default.env │ ├── Dockerfile │ ├── tsconfig.json │ ├── package.json │ ├── build.js │ └── graphql │ │ └── schema-dev.graphql └── frontend │ └── package.json ├── provision ├── .gitignore ├── .env ├── clean.sh ├── setup.sh ├── images │ └── node-builder-12-alpine │ │ └── Dockerfile ├── setup-docker.sh └── docker-compose.yml ├── .prettierignore ├── .devcontainer ├── provision │ ├── env │ │ ├── config.json │ │ ├── aws.env │ │ └── provision.js │ └── app │ │ └── docker-compose.yaml ├── scripts │ ├── install-firacode.sh │ └── install-dev-tools.sh └── devcontainer.json ├── .prettierrc ├── .editorconfig ├── package.json ├── .gitignore └── README.md /packages/auth/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /packages/docs/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store -------------------------------------------------------------------------------- /packages/docs/src/biz/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /provision/.gitignore: -------------------------------------------------------------------------------- 1 | data/ 2 | -------------------------------------------------------------------------------- /packages/docs/src/changelog/README.md: -------------------------------------------------------------------------------- 1 | # Changelogs 2 | -------------------------------------------------------------------------------- 
/packages/docs/.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | src/.vuepress/dist/ 3 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | /templates 2 | /node_modules 3 | /lib/**/* 4 | /CONTRIBUTING.md 5 | -------------------------------------------------------------------------------- /packages/backend/.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.formatOnSave": true 3 | } 4 | -------------------------------------------------------------------------------- /packages/backend/.prettierignore: -------------------------------------------------------------------------------- 1 | /templates 2 | /node_modules 3 | /lib/**/* 4 | /CONTRIBUTING.md 5 | -------------------------------------------------------------------------------- /.devcontainer/provision/env/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "packages": { 3 | "cluster": { 4 | "includes": ["*"] 5 | } 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /packages/backend/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | *.env 3 | !default.env 4 | keys/ 5 | graphql/*.graphql 6 | !graphql/schema-dev.graphql 7 | -------------------------------------------------------------------------------- /.devcontainer/provision/env/aws.env: -------------------------------------------------------------------------------- 1 | AWS_ACCESS_KEY_ID= 2 | AWS_SECRET_ACCESS_KEY= 3 | AWS_REGION=ap-southeast-1 4 | KEY_NAME= 5 | AWS_ACCOUNT_ID= 6 | -------------------------------------------------------------------------------- /packages/docs/src/.vuepress/styles/palette.styl: -------------------------------------------------------------------------------- 1 | .home { 2 | max-width: 100%; 3 | } 4 | 5 | .contains-task-list LI { 6 | list-style-type: none; 7 | } 8 | -------------------------------------------------------------------------------- /provision/.env: -------------------------------------------------------------------------------- 1 | DATABASE_USER=mbt 2 | DATABASE_PASS=Mbt!@#456 3 | DATABASE_NAME=mbt 4 | PGADMIN_DEFAULT_EMAIL=admin@ltv.vn 5 | PGADMIN_DEFAULT_PASSWORD=Mbt!@#456 6 | -------------------------------------------------------------------------------- /packages/backend/shared/core/index.ts: -------------------------------------------------------------------------------- 1 | export enum AuthSpecialRole { 2 | SYSTEM = '$system', 3 | EVERYONE = '$everyone', 4 | AUTHENTICATED = '$authenticated', 5 | OWNER = '$owner', 6 | } 7 | -------------------------------------------------------------------------------- /packages/backend/shared/prisma/migrations/migration_lock.toml: -------------------------------------------------------------------------------- 1 | # Please do not edit this file manually 2 | # It should be added in your version-control system (i.e. 
Git) 3 | provider = "postgresql" -------------------------------------------------------------------------------- /packages/backend/services/core/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@svc/core", 3 | "version": "0.1.0", 4 | "description": "core service", 5 | "author": "Luc ", 6 | "private": true 7 | } 8 | -------------------------------------------------------------------------------- /packages/backend/services/interaction/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@svc/mail", 3 | "version": "0.1.0", 4 | "description": "Mail service", 5 | "author": "Luc ", 6 | "private": true 7 | } 8 | -------------------------------------------------------------------------------- /packages/backend/services/profile/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@svc/profile", 3 | "version": "0.1.0", 4 | "description": "Profile service", 5 | "author": "Luc ", 6 | "private": true 7 | } 8 | -------------------------------------------------------------------------------- /packages/frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@app/frontend", 3 | "version": "0.1.0", 4 | "private": true, 5 | "author": { 6 | "name": "Luc Duong", 7 | "email": "luc@ltv.vn" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "useTabs": false, 3 | "printWidth": 100, 4 | "semi": true, 5 | "tabWidth": 2, 6 | "singleQuote": true, 7 | "trailingComma": "all", 8 | "jsxBracketSameLine": false, 9 | "parser": "babel" 10 | } 11 | -------------------------------------------------------------------------------- /packages/backend/.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "useTabs": false, 3 | "printWidth": 100, 4 | "semi": false, 5 | "tabWidth": 2, 6 | "singleQuote": true, 7 | "trailingComma": "all", 8 | "jsxBracketSameLine": false, 9 | "parser": "babel" 10 | } 11 | -------------------------------------------------------------------------------- /provision/clean.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo 'DOCKER VERSION' 3 | docker -v 4 | 5 | echo 'DOCKER COMPOSE VERSION' 6 | docker-compose -v 7 | 8 | echo 'CREATE DATA DIRECTORY' 9 | mkdir -p ./data 10 | 11 | echo 'COMPOSE DOWN' 12 | docker-compose --project-name=mbt down 13 | -------------------------------------------------------------------------------- /provision/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo 'DOCKER VERSION' 3 | docker -v 4 | 5 | echo 'DOCKER COMPOSE VERSION' 6 | docker-compose -v 7 | 8 | echo 'CREATE DATA DIRECTORY' 9 | mkdir -p ./data 10 | 11 | echo 'COMPOSE UP' 12 | docker-compose --project-name=mbt up -d 13 | -------------------------------------------------------------------------------- /packages/backend/services/storage/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@svc/storage", 3 | "version": "0.1.0", 4 | "description": "storage service", 5 | "author": "Luc ", 6 | "private": true, 7 | "dependencies": { 8 | "aws-sdk": "^2.997.0" 9 | } 10 | } 11 | 
-------------------------------------------------------------------------------- /packages/backend/services/firebase/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@svc/firebase", 3 | "version": "0.1.0", 4 | "description": "firebase service", 5 | "author": "Luc ", 6 | "private": true, 7 | "dependencies": { 8 | "firebase-admin": "^9.12.0" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /packages/backend/shared/@types/prisma/index.d.ts: -------------------------------------------------------------------------------- 1 | import '@prisma/client' 2 | 3 | declare module '@prisma/client' { 4 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 5 | type GlobalReject<T = any> = 'rejectOnNotFound' extends keyof T ? T['rejectOnNotFound'] : false 6 | } 7 | -------------------------------------------------------------------------------- /provision/images/node-builder-12-alpine/Dockerfile: -------------------------------------------------------------------------------- 1 | # syntax = docker/dockerfile:experimental 2 | # ===== Builder ===== 3 | # =================== 4 | FROM node:12-alpine AS builder 5 | 6 | RUN apk --no-cache add \ 7 | g++ make python git \ 8 | && yarn global add node-gyp \ 9 | && rm -rf /var/cache/apk/* 10 | -------------------------------------------------------------------------------- /packages/backend/shared/utils/cors.ts: -------------------------------------------------------------------------------- 1 | export function getCORSFromEnv(): boolean | string[] { 2 | const { CORS_ORIGIN } = process.env 3 | if (!CORS_ORIGIN || ['true', 'false', true, false].indexOf(CORS_ORIGIN) !== -1) { 4 | return CORS_ORIGIN == 'true' 5 | } 6 | return CORS_ORIGIN.replace(/\s+/g, '').split(',') 7 | } 8 | -------------------------------------------------------------------------------- /packages/backend/services/api-gateway/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@svc/api-gateway", 3 | "version": "0.1.0", 4 | "description": "API Gateway", 5 | "author": "Luc ", 6 | "private": true, 7 | "dependencies": { 8 | "firebase-admin": "^9.12.0", 9 | "moleculer-apollo-server": "^0.3.4", 10 | "moleculer-web": "^0.10.2" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /packages/docs/Dockerfile: -------------------------------------------------------------------------------- 1 | # ===== Builder ===== 2 | # =================== 3 | FROM node:10.15.0 AS builder 4 | 5 | WORKDIR /app/ 6 | ADD docs/package.json . 7 | RUN yarn install 8 | ADD docs/src ./src 9 | RUN yarn build 10 | 11 | # ===== Image ===== 12 | # ================== 13 | FROM nginx:alpine AS docs 14 | WORKDIR /usr/share/nginx/html 15 | COPY --from=builder /app/src/.vuepress/dist/ .
16 | -------------------------------------------------------------------------------- /packages/backend/shared/middlewares/env.middleware.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-explicit-any */ 2 | import { Context } from 'moleculer' 3 | import env from '@ltv/env' 4 | 5 | export default { 6 | name: 'EnvMiddleware', 7 | 8 | localAction(next: (ctx: Context) => Promise) { 9 | return function (ctx: Context) { 10 | ctx.env = env 11 | return next(ctx) 12 | } 13 | }, 14 | } as any 15 | -------------------------------------------------------------------------------- /packages/docs/src/README.md: -------------------------------------------------------------------------------- 1 | --- 2 | home: true 3 | actionText: Get Started → 4 | actionLink: /dev/ 5 | features: 6 | - title: Development Documents 7 | details: Setup Environments, Coding, ... 8 | - title: Business Requirements 9 | details: Build by developers for developers 10 | - title: Latest Version 11 | details: v0.1.0 -> CHANGELOG (/changelog) 12 | footer: Moleculer Boilerplate for Typescript | Copyright © 2020 by LTV 13 | --- 14 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig helps developers define and maintain consistent 2 | # coding styles between different editors and IDEs 3 | # editorconfig.org 4 | 5 | root = true 6 | 7 | [*] 8 | end_of_line = lf 9 | charset = utf-8 10 | trim_trailing_whitespace = true 11 | insert_final_newline = true 12 | indent_style = space 13 | indent_size = 2 14 | 15 | [*.txt] 16 | indent_style = tab 17 | indent_size = 4 18 | 19 | [*.{diff,md}] 20 | trim_trailing_whitespace = false 21 | -------------------------------------------------------------------------------- /packages/backend/.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig helps developers define and maintain consistent 2 | # coding styles between different editors and IDEs 3 | # editorconfig.org 4 | 5 | root = true 6 | 7 | [*] 8 | end_of_line = lf 9 | charset = utf-8 10 | trim_trailing_whitespace = true 11 | insert_final_newline = true 12 | indent_style = space 13 | indent_size = 2 14 | 15 | [*.txt] 16 | indent_style = tab 17 | indent_size = 4 18 | 19 | [*.{diff,md}] 20 | trim_trailing_whitespace = false 21 | -------------------------------------------------------------------------------- /packages/backend/services/interaction/mail.svc.ts: -------------------------------------------------------------------------------- 1 | import { Event, Service } from 'moleculer-decorators' 2 | import { BaseService } from 'shared/core/base.svc' 3 | 4 | @Service({ 5 | name: 'mail', 6 | }) 7 | export default class MailService extends BaseService { 8 | send() { 9 | console.log('>> send: ') 10 | } 11 | 12 | @Event({ 13 | name: 'auth.passwordChanged', 14 | }) 15 | sendPasswordChangedEmail() { 16 | console.log('[sendPasswordChangedEmail]') 17 | this.send() 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /packages/backend/services/storage/s3.svc.ts: -------------------------------------------------------------------------------- 1 | import { Context } from 'moleculer' 2 | import { Action, Service } from 'moleculer-decorators' 3 | import { AuthSpecialRole } from 'shared/core' 4 | import { BaseService } from 
'shared/core/base.svc' 5 | 6 | @Service({ 7 | name: 's3', 8 | mixins: [], 9 | }) 10 | export default class S3Service extends BaseService { 11 | @Action({ 12 | permissions: [AuthSpecialRole.AUTHENTICATED], 13 | }) 14 | getPresignedUrl(ctx: Context) { 15 | return ctx.params 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /packages/docs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@app/docs", 3 | "version": "0.1.0", 4 | "private": true, 5 | "author": { 6 | "name": "Luc Duong", 7 | "email": "luc@ltv.vn" 8 | }, 9 | "scripts": { 10 | "serve": "vuepress dev src", 11 | "build": "vuepress build src" 12 | }, 13 | "devDependencies": { 14 | "@vuepress/plugin-back-to-top": "^1.3.0", 15 | "cz-conventional-changelog": "^3.1.0", 16 | "markdown-it-task-lists": "^2.1.1", 17 | "vuepress": "^1.3.0" 18 | }, 19 | "license": "private" 20 | } 21 | -------------------------------------------------------------------------------- /packages/docs/src/dev/README.md: -------------------------------------------------------------------------------- 1 | # Development 2 | 3 | ## Build Image 4 | 5 | Build an image using script `yarn build:{workspace}:image {TAG_NAME}` 6 | 7 | - Docs: `yarn build:docs:image docker.pkg.github.com/ltv/moleculer-boilerplate-ts/docs:0.1.0` 8 | - Frontend: `yarn build:frontend:image docker.pkg.github.com/ltv/moleculer-boilerplate-ts/frontend:0.1.0` 9 | - Backend: `yarn build:backend:image docker.pkg.github.com/ltv/moleculer-boilerplate-ts/backend:0.1.0` 10 | - Database: `yarn build:database:image docker.pkg.github.com/ltv/moleculer-boilerplate-ts/database:0.1.0` 11 | -------------------------------------------------------------------------------- /packages/backend/tools/firebase/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Sample FirebaseUI App 6 | 7 | 8 | 9 | 10 | 12 | 13 | 14 |
15 | 16 | 17 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "moleculer-boilerplate-ts", 3 | "private": true, 4 | "dependencies": {}, 5 | "workspaces": [ 6 | "packages/*" 7 | ], 8 | "scripts": { 9 | "serve:docs": "yarn workspace @app/docs run serve", 10 | "serve:backend": "yarn workspace @app/backend run serve", 11 | "build:docs:image": "docker build --file packages/docs/Dockerfile packages --tag", 12 | "build:backend:image": "docker build --file packages/backend/Dockerfile packages/backend --tag", 13 | "build:frontend:image": "docker build --file packages/frontend/Dockerfile packages/frontend --tag" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /packages/backend/.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "esbenp.prettier-vscode", 4 | "dbaeumer.vscode-eslint", 5 | "aaron-bond.better-comments", 6 | "streetsidesoftware.code-spell-checker", 7 | "mikestead.dotenv", 8 | "codezombiech.gitignore", 9 | "eamodio.gitlens", 10 | "wix.vscode-import-cost", 11 | "orta.vscode-jest", 12 | "yzhang.markdown-all-in-one", 13 | "davidanson.vscode-markdownlint", 14 | "christian-kohler.path-intellisense", 15 | "wayou.vscode-todo-highlight", 16 | "prisma.prisma" 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /provision/setup-docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Install docker 3 | curl -fsSL get.docker.com -o get-docker.sh 4 | sudo sh get-docker.sh 5 | docker -v 6 | 7 | # Install docker-compose 8 | sudo curl -L https://github.com/docker/compose/releases/download/1.22.0/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose 9 | sudo chmod +x /usr/local/bin/docker-compose 10 | docker-compose -v 11 | 12 | sudo sed -i '/^ExecStart.*$/c\ExecStart=/usr/bin/dockerd -H unix:// -H tcp://0.0.0.0:6513' /lib/systemd/system/docker.service 13 | # Restart docker daemon 14 | sudo systemctl daemon-reload 15 | sudo systemctl restart docker 16 | -------------------------------------------------------------------------------- /.devcontainer/scripts/install-firacode.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | fonts_dir="${HOME}/.local/share/fonts" 4 | if [ ! -d "${fonts_dir}" ]; then 5 | echo "mkdir -p $fonts_dir" 6 | mkdir -p "${fonts_dir}" 7 | else 8 | echo "Found fonts dir $fonts_dir" 9 | fi 10 | 11 | for type in Bold Light Medium Regular Retina; do 12 | file_path="${HOME}/.local/share/fonts/FiraCode-${type}.ttf" 13 | file_url="https://github.com/tonsky/FiraCode/blob/master/distr/ttf/FiraCode-${type}.ttf?raw=true" 14 | if [ !
-e "${file_path}" ]; then 15 | echo "wget -O $file_path $file_url" 16 | wget -O "${file_path}" "${file_url}" 17 | else 18 | echo "Found existing file $file_path" 19 | fi 20 | done 21 | 22 | echo "fc-cache -f" 23 | fc-cache -f 24 | -------------------------------------------------------------------------------- /packages/backend/services/profile/profile-db.svc.ts: -------------------------------------------------------------------------------- 1 | import { Prisma } from '.prisma/client' 2 | import { GlobalReject } from '@prisma/client' 3 | import { Context } from 'moleculer' 4 | import { Action, Service } from 'moleculer-decorators' 5 | import { PrismaService } from 'shared/core/prisma.svc' 6 | import { PrismaMixin } from 'shared/mixins/prisma.mixin' 7 | 8 | type ModelDelegate = Prisma.ProfileDelegate 9 | 10 | @Service({ 11 | name: 'profile-db', 12 | mixins: [ 13 | PrismaMixin({ 14 | prisma: { model: 'Profile' }, 15 | graphql: true, 16 | }), 17 | ], 18 | }) 19 | export default class ProfileDbService extends PrismaService { 20 | @Action({ 21 | name: 'changePassword', 22 | }) 23 | changePassword(ctx: Context) { 24 | ctx.emit('auth.passwordChanged', ':)') 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /packages/backend/shared/prisma/schema.prisma: -------------------------------------------------------------------------------- 1 | // This is your Prisma schema file, 2 | // learn more about it in the docs: https://pris.ly/d/prisma-schema 3 | 4 | datasource db { 5 | provider = "postgresql" 6 | url = env("DATABASE_URL") 7 | } 8 | 9 | generator client { 10 | provider = "prisma-client-js" 11 | } 12 | 13 | model Config { 14 | key String @id 15 | value String 16 | isDefault Boolean @default(false) 17 | createdAt DateTime? @default(now()) 18 | updatedAt DateTime? @updatedAt 19 | } 20 | 21 | model Profile { 22 | id Int @id @default(autoincrement()) 23 | userId String @unique 24 | email String? @unique 25 | displayName String? @db.VarChar(50) 26 | photoUrl String? 27 | address String? 
28 | } 29 | 30 | enum Role { 31 | AUTHENTICATED 32 | ADMIN 33 | } 34 | -------------------------------------------------------------------------------- /packages/backend/shared/@types/moleculer/index.d.ts: -------------------------------------------------------------------------------- 1 | import 'moleculer' 2 | declare module 'moleculer' { 3 | // interface ActionSchema { 4 | // permissions?: string[] 5 | // } 6 | 7 | type EnvFunc = (key: string, defaultValue?: T) => T 8 | type Utils = { 9 | string: (key: string, defaultValue?: string) => string | undefined 10 | int: (key: string, defaultValue?: number) => number | undefined 11 | float: (key: string, defaultValue?: number) => number | undefined 12 | bool: (key: string, defaultValue?: boolean) => boolean | undefined 13 | json: (key: string, defaultValue?: T) => T | undefined 14 | array: (key: string, defaultValue?: string[]) => string[] | undefined 15 | date: (key: string, defaultValue?: Date) => Date | undefined 16 | } 17 | 18 | interface Context { 19 | env: EnvFunc & Utils 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /packages/backend/services/api-gateway/api.svc.ts: -------------------------------------------------------------------------------- 1 | import env from '@ltv/env' 2 | import { Event, Service } from 'moleculer-decorators' 3 | import ApiService from 'moleculer-web' 4 | import { BaseService } from 'shared/core/base.svc' 5 | import { ApolloMixin } from 'shared/mixins/apollo-server.mixin' 6 | import { FirebaseAuthMixin } from 'shared/mixins/firebase.mixin' 7 | import { getCORSFromEnv } from 'shared/utils/cors' 8 | import { routes } from './routes' 9 | 10 | @Service({ 11 | name: 'gateway', 12 | mixins: [ApiService, ApolloMixin, FirebaseAuthMixin], 13 | settings: { 14 | port: env('PORT', 4000), 15 | routes, 16 | cors: { 17 | origin: getCORSFromEnv(), 18 | }, 19 | // use: [helmet()], // rely on reverse proxy. 
:D 20 | }, 21 | }) 22 | export default class RestAPIGateway extends BaseService { 23 | @Event() 24 | generatedSchemaFromDb(): void { 25 | this.invalidateGraphQLSchema() 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /packages/backend/default.env: -------------------------------------------------------------------------------- 1 | # molculer config 2 | NODE_ENV=development 3 | NAMESPACE=mbt 4 | PORT=4000 5 | LOGGER_TYPE=Console 6 | LOGGER_LEVEL=info 7 | TRANSPORTER_URL=nats://localhost:4222 8 | CORS_ORIGIN=* 9 | 10 | # HealthCheck 11 | HEALTH_CHECK_READINESS_PATH=/ready 12 | HEALTH_CHECK_LIVENESS_PATH=/live 13 | HEALTH_CHECK_PORT=3001 14 | 15 | # Metrics 16 | METRIC_PORT=3031 17 | METRIC_PATH=/metrics 18 | 19 | # Tracing 20 | TRACING_TYPE=Console 21 | 22 | # Database 23 | DATABASE_HOST=localhost 24 | DATABASE_NAME=mbt 25 | DATABASE_USER=mbt 26 | DATABASE_PASS=Mbt123789 27 | DATABASE_PORT=5432 28 | DATABASE_POOL_MIN=1 29 | DATABASE_POOL_MAX=3 30 | DATABASE_SCHEMA=app 31 | 32 | ADMIN_PASSWORD=Admin@1234 33 | ADMIN_EMAIL=admin@ltv.dev 34 | 35 | # Redis 36 | REDIS_PREFIX=MBT 37 | REDIS_HOST=localhost 38 | REDIS_PORT=6379 39 | 40 | # GraphQL 41 | APOLLO_KEY= 42 | 43 | # Firebase 44 | GOOGLE_APPLICATION_CREDENTIALS=keys/serviceaccounts.json 45 | -------------------------------------------------------------------------------- /packages/backend/shared/core/base.svc.ts: -------------------------------------------------------------------------------- 1 | import { GenericObject, Service, ServiceSettingSchema } from 'moleculer' 2 | 3 | export type MemoizeOptions = { 4 | ttl?: number 5 | } 6 | 7 | export class BaseService extends Service { 8 | public config: GenericObject = {} 9 | 10 | // memoize (S) 11 | protected async memoize( 12 | name: string, 13 | params: P, 14 | callback: () => Promise, 15 | options?: MemoizeOptions, 16 | ): Promise { 17 | if (!this.broker.cacher) return callback() 18 | 19 | const key = this.broker.cacher.defaultKeygen(`${name}:memoize-${name}`, params as any, {}, []) 20 | 21 | let res = await this.broker.cacher.get(key) 22 | if (res) return res 23 | 24 | res = await callback() 25 | this.broker.cacher.set(key, res, options && options.ttl) 26 | 27 | return res 28 | } 29 | // memoize (E) 30 | } 31 | -------------------------------------------------------------------------------- /packages/backend/shared/prisma/migrations/20211001002439_init/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "Role" AS ENUM ('AUTHENTICATED', 'ADMIN'); 3 | 4 | -- CreateTable 5 | CREATE TABLE "Config" ( 6 | "key" TEXT NOT NULL, 7 | "value" TEXT NOT NULL, 8 | "isDefault" BOOLEAN NOT NULL DEFAULT false, 9 | "createdAt" TIMESTAMP(3) DEFAULT CURRENT_TIMESTAMP, 10 | "updatedAt" TIMESTAMP(3), 11 | 12 | CONSTRAINT "Config_pkey" PRIMARY KEY ("key") 13 | ); 14 | 15 | -- CreateTable 16 | CREATE TABLE "Profile" ( 17 | "id" SERIAL NOT NULL, 18 | "userId" TEXT NOT NULL, 19 | "email" TEXT, 20 | "displayName" VARCHAR(50), 21 | "photoUrl" TEXT, 22 | "address" TEXT, 23 | 24 | CONSTRAINT "Profile_pkey" PRIMARY KEY ("id") 25 | ); 26 | 27 | -- CreateIndex 28 | CREATE UNIQUE INDEX "Profile_userId_key" ON "Profile"("userId"); 29 | 30 | -- CreateIndex 31 | CREATE UNIQUE INDEX "Profile_email_key" ON "Profile"("email"); 32 | -------------------------------------------------------------------------------- /packages/backend/shared/utils/prisma.ts: 
-------------------------------------------------------------------------------- 1 | import { Prisma, PrismaClient } from '.prisma/client' 2 | import { LoggerInstance } from 'moleculer' 3 | 4 | export const customPrismaLog = (prisma: PrismaClient, logger: LoggerInstance) => { 5 | prisma.$on<any>('query', (e: Prisma.QueryEvent) => { 6 | logger.info( 7 | '\n\x1b[36m -> Query: \x1b[0m', 8 | `\x1b[35m ${e.query} \x1b[0m`, 9 | '\n\x1b[36m -> Params: \x1b[0m', 10 | `\x1b[35m ${e.params} \x1b[0m`, 11 | '\n\x1b[36m -> Duration: \x1b[0m', 12 | `\x1b[35m ${e.duration} \x1b[0m`, 13 | ) 14 | }) 15 | // prisma.$on('info', (e: Prisma.LogEvent) => { 16 | // logger.info(e.message) 17 | // }) 18 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 19 | prisma.$on<any>('error', (e: Prisma.LogEvent) => { 20 | logger.error(e.message) 21 | }) 22 | return prisma 23 | } 24 | -------------------------------------------------------------------------------- /packages/backend/shared/@types/index.d.ts: -------------------------------------------------------------------------------- 1 | interface AppMeta { 2 | userId?: string 3 | clientIp?: string 4 | token?: string 5 | user?: User 6 | } 7 | 8 | interface User { 9 | id: string 10 | } 11 | 12 | interface GraphQLTransformPayloadOptions { 13 | totalCount?: () => Promise<number> 14 | } 15 | 16 | interface GraphQLMutationInputType<T> { 17 | [key: string]: T 18 | } 19 | 20 | interface GraphQLClientMutation { 21 | clientMutationId: string 22 | } 23 | 24 | interface GraphQLMutationInput<T> { 25 | input: GraphQLMutationInputType<T> & GraphQLClientMutation 26 | } 27 | 28 | interface GraphQLSinglePayload<T> { 29 | [key: string]: T 30 | } 31 | 32 | interface GraphQLPayload<T> { 33 | nodes: T[] 34 | totalCount?: number 35 | } 36 | 37 | interface GraphQLQueryInput<T, FilterType> { 38 | first?: number 39 | last?: number 40 | offset?: number 41 | orderBy?: string[] 42 | condition?: T 43 | filter?: FilterType 44 | } 45 | -------------------------------------------------------------------------------- /packages/backend/shared/utils/db.ts: -------------------------------------------------------------------------------- 1 | import { Pool, PoolConfig } from 'pg' 2 | import env from '@ltv/env' 3 | 4 | let poolInstance: Pool 5 | 6 | export function getConnectionConfig(): PoolConfig { 7 | const host = env('DATABASE_HOST') 8 | const database = env('DATABASE_NAME') 9 | const user = env('DATABASE_USER') 10 | const password = env('DATABASE_PASS') 11 | const port = env.int('DATABASE_PORT', 5432) 12 | const ssl = env.bool('DATABASE_SSL', false) 13 | 14 | const min = env.int('DATABASE_POOL_MIN', 1) 15 | const max = env.int('DATABASE_POOL_MAX', 3) 16 | 17 | return { 18 | host, 19 | database, 20 | user, 21 | password, 22 | port, 23 | ssl, 24 | 25 | // pool min, max 26 | min, 27 | max, 28 | } 29 | } 30 | 31 | export function createPGPoolInstance(): Pool { 32 | const cConfig = getConnectionConfig() 33 | if (!poolInstance) { 34 | poolInstance = new Pool(cConfig) 35 | } 36 | return poolInstance 37 | } 38 | -------------------------------------------------------------------------------- /packages/backend/services/api-gateway/routes.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/explicit-module-boundary-types */ 2 | /* eslint-disable @typescript-eslint/no-explicit-any */ 3 | import { ClientRequest } from 'http' 4 | import { Context } from 'moleculer' 5 | 6 | export const routes: any[] = [ 7 | { 8 | path: '/api/v1', 9
| etag: true, 10 | camelCaseNames: true, 11 | authentication: false, 12 | autoAliases: false, 13 | aliases: { 14 | // 'GET /auth/me': `users.me`, 15 | }, 16 | // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types 17 | onBeforeCall( 18 | ctx: Context, 19 | _: any, 20 | req: ClientRequest & { headers: { [key: string]: string } }, 21 | ) { 22 | this.logger.info('onBeforeCall in protected route') 23 | ctx.meta.clientIp = req.headers['x-forwarded-for'] || req.socket.remoteAddress 24 | 25 | this.logger.info('Request from client: ', ctx.meta.clientIp) 26 | }, 27 | }, 28 | ] 29 | export default {} 30 | -------------------------------------------------------------------------------- /packages/backend/tools/create-env.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const fs = require('fs') 3 | const { createPrismaEnv } = require('./prisma-env') 4 | 5 | const defaultEnvPath = path.resolve(__dirname, '..', 'default.env') 6 | const localEnvPath = path.resolve(__dirname, '..', 'local.env') 7 | 8 | const createEnv = async () => { 9 | if (fs.existsSync(localEnvPath)) { 10 | return 11 | } 12 | fs.copyFileSync(defaultEnvPath, localEnvPath) 13 | console.log('Copied environment from default.env to local.env') 14 | } 15 | 16 | const writeDbURL2LocalEnv = (dbUrl) => { 17 | const localEnvBody = fs.readFileSync(localEnvPath, 'utf8') 18 | if (localEnvBody.indexOf('DATABASE_URL') !== -1) { 19 | return 20 | } 21 | const dbInfo = `## PRISMA DATABASE_URL -> Generated from .env\n${dbUrl}\n` 22 | fs.writeFileSync(localEnvPath, `${dbInfo}\n${localEnvBody}`, { encoding: 'utf8' }) 23 | } 24 | 25 | Promise.resolve() 26 | .then(createEnv) 27 | .then(createPrismaEnv) 28 | .then(writeDbURL2LocalEnv) 29 | .then(() => { 30 | console.log('Created prisma env at .env') 31 | }) 32 | -------------------------------------------------------------------------------- /packages/backend/services/firebase/firebase.svc.ts: -------------------------------------------------------------------------------- 1 | import admin from 'firebase-admin' 2 | import { Event } from 'moleculer-decorators' 3 | import { AuthSpecialRole } from 'shared/core' 4 | import { BaseService } from 'shared/core/base.svc' 5 | 6 | export default class FirebaseService extends BaseService { 7 | created(): void { 8 | this.initFirebaseAdmin() 9 | } 10 | 11 | initFirebaseAdmin(): void { 12 | if (admin.apps.length > 0) { 13 | return 14 | } 15 | admin.initializeApp() 16 | } 17 | 18 | @Event({ 19 | name: 'auth.resolvedToken', 20 | }) 21 | async setCustomUserClaims(profile: { userId: string, role: AuthSpecialRole }): Promise<void> { 22 | const cacheId = `profile.${profile.userId}` 23 | this.logger.debug(`[resolvedToken] > ${profile.userId}`) 24 | const hasProfileInCached = await this.broker.cacher.get(cacheId).then((rtnPrf) => !!rtnPrf) 25 | if (hasProfileInCached) { 26 | return 27 | } 28 | await admin 29 | .auth() 30 | .setCustomUserClaims(profile.userId, { role: profile.role.toLocaleLowerCase() }) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /packages/backend/tools/prisma-env.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const fs = require('fs') 3 | 4 | const prismaEnvPath = path.resolve(__dirname, '..', '.env') 5 | const header = '# Generated by tools/prisma-env.js\n# Please do not edit this file manually' 6 | 7 | const createPrismaEnv = async () => { 8 | const
env = require('@ltv/env') 9 | const host = env('DATABASE_HOST') 10 | const name = env('DATABASE_NAME') 11 | const user = env('DATABASE_USER') 12 | const pass = env('DATABASE_PASS') 13 | const schema = env('DATABASE_SCHEMA', 'public') 14 | const port = env.int('DATABASE_PORT', 5432) 15 | const ssl = env.bool('DATABASE_SSL', false) 16 | let connection = `postgresql://${user}:${pass}@${host}:${port}/${name}?schema=${schema}` 17 | if (ssl) { 18 | connection += '&sslmode=require' 19 | } 20 | fs.writeFileSync(prismaEnvPath, `${header}\nDATABASE_URL=${connection}`) 21 | return `DATABASE_URL=${connection}` 22 | } 23 | 24 | // createPrismaEnv().then(() => { 25 | // console.log('Created prisma env at .env') 26 | // }) 27 | module.exports = { 28 | createPrismaEnv, 29 | } 30 | -------------------------------------------------------------------------------- /.devcontainer/provision/app/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3.6" 2 | services: 3 | mongodb: 4 | image: mongo:latest 5 | ports: 6 | - 27017:27017 7 | environment: 8 | - MONGO_DATA_DIR=/data/db 9 | volumes: 10 | - mongodb:/data/db 11 | networks: 12 | - app 13 | 14 | nats: 15 | image: nats:1.3.0-linux 16 | container_name: nats 17 | restart: always 18 | ports: 19 | - 4222:4222 20 | - 4444:4444 21 | - 6222:6222 22 | - 8222:8222 23 | networks: 24 | - app 25 | 26 | redis: 27 | image: redis:5-alpine 28 | container_name: redis5 29 | hostname: redis5 30 | restart: always 31 | ports: 32 | - 6379:6379 33 | networks: 34 | - app 35 | 36 | redis-commander: 37 | image: rediscommander/redis-commander 38 | container_name: redis-commander 39 | hostname: redis-commander 40 | restart: always 41 | ports: 42 | - 6380:8081 43 | networks: 44 | - app 45 | environment: 46 | REDIS_HOSTS: local:redis5:6379 47 | 48 | volumes: 49 | mongodb: 50 | 51 | networks: 52 | app: 53 | external: true 54 | -------------------------------------------------------------------------------- /packages/backend/Dockerfile: -------------------------------------------------------------------------------- 1 | # ===== Builder ===== 2 | # =================== 3 | FROM node:14-alpine AS builder 4 | 5 | ARG service 6 | 7 | RUN apk --no-cache add \ 8 | g++ make python3 git \ 9 | && yarn global add node-gyp \ 10 | && rm -rf /var/cache/apk/* 11 | 12 | WORKDIR /builder/ 13 | 14 | # Cache backend's package 15 | ADD services/${service}/package.json services/${service}/ 16 | ADD package.json . 17 | ADD yarn.lock . 18 | ADD build.js . 19 | ADD tsconfig.json . 20 | 21 | RUN yarn --ignore-scripts --frozen-lockfile 22 | 23 | # Cache backend's src 24 | ADD services/$service services/$service 25 | ADD shared shared 26 | ADD tools tools 27 | 28 | RUN yarn prisma generate --schema ./shared/prisma/schema.prisma 29 | 30 | # Build 31 | RUN yarn build $service 32 | 33 | # ===== backend ===== 34 | # ================== 35 | FROM node:14-alpine AS backend 36 | 37 | ARG service 38 | 39 | WORKDIR /app/ 40 | COPY --from=builder /builder/dist/$service/ . 
41 | RUN yarn --production --frozen-lockfile 42 | -------------------------------------------------------------------------------- /packages/backend/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "experimentalDecorators": true, 4 | "emitDecoratorMetadata": true, 5 | "allowSyntheticDefaultImports": true, 6 | "esModuleInterop": true, 7 | "module": "commonjs", 8 | "target": "es2019", 9 | "allowJs": true, 10 | "noImplicitAny": true, 11 | "moduleResolution": "node", 12 | "resolveJsonModule": true, 13 | "sourceMap": false, 14 | "outDir": "./dist", 15 | "baseUrl": "./", 16 | "paths": { 17 | "shared/*": ["shared/*"], 18 | "services/*": ["services/*"] 19 | }, 20 | "typeRoots": ["../../node_modules/@types", "node_modules/@types", "shared/@types"], 21 | "lib": [ 22 | "es5", 23 | "es6", 24 | "es2015.core", 25 | "es2015.collection", 26 | "es2015.generator", 27 | "es2015.iterable", 28 | "es2015.promise", 29 | "es2015.proxy", 30 | "es2015.reflect", 31 | "es2015.symbol", 32 | "es2015.symbol.wellknown", 33 | "es2018", 34 | "es2020", 35 | "esnext.asynciterable" 36 | ] 37 | }, 38 | "exclude": ["**/*.test.ts", "dist/**", "tests/**/*"], 39 | "ts-node": { 40 | "files": true 41 | }, 42 | } 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | /logs 3 | *.log 4 | 5 | # Runtime data 6 | pids 7 | *.pid 8 | *.seed 9 | tmp 10 | blockNo 11 | 12 | # Directory for instrumented libs generated by jscoverage/JSCover 13 | lib-cov 14 | 15 | # Coverage directory used by tools like istanbul 16 | coverage 17 | 18 | # Test report folder 19 | test_reports 20 | tests/e2e/reports 21 | 22 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 23 | .grunt 24 | 25 | # Gulp 26 | #.gulpfile 27 | 28 | # Dependency directory 29 | # Commenting this out is preferred by some people, see 30 | # https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git 31 | node_modules 32 | npm-debug.log 33 | 34 | # SASS temp files 35 | .sass-cache 36 | 37 | # I18next missing files 38 | *.missing.json 39 | 40 | # Bower 41 | .bower-*/ 42 | bower_components 43 | 44 | # Users Environment Variables 45 | .lock-wscript 46 | 47 | # Sublime 48 | .sublime-project 49 | *.sublime-project 50 | *.sublime-workspace 51 | 52 | # SVN 53 | .svn 54 | 55 | .DS_Store 56 | Thumbs.db 57 | 58 | # Secret file 59 | # /config.js 60 | # config.json 61 | 62 | # Data folder 63 | # data 64 | .DS_Store 65 | 66 | get-docker.sh 67 | dist/ 68 | .env.* 69 | !.env.default 70 | generated-schema.gql 71 | -------------------------------------------------------------------------------- /packages/backend/shared/core/prisma.svc.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/explicit-module-boundary-types, @typescript-eslint/no-explicit-any */ 2 | import { PrismaClient } from '@prisma/client' 3 | import has from 'lodash/has' 4 | import lowerFirst from 'lodash/lowerFirst' 5 | import { Context, GenericObject, ServiceSettingSchema } from 'moleculer' 6 | import { BaseService } from './base.svc' 7 | 8 | export interface DatabaseSettings { 9 | table: string 10 | } 11 | export interface BaseServiceSettings extends ServiceSettingSchema { 12 | database?: DatabaseSettings 13 | } 14 | 15 | export type MemoizeOptions = { 16 | ttl?: number 17 | } 18 | 19 | export 
class PrismaService extends BaseService { 20 | public prisma!: PrismaClient 21 | public get model(): ModelDelegate { 22 | const name = this.settings.database?.table 23 | if (!name) { 24 | return null 25 | } 26 | if (!has(this.prisma, lowerFirst(name))) { 27 | return null 28 | } 29 | return (this.prisma as any)[lowerFirst(name)] 30 | } 31 | public config: GenericObject = {} 32 | 33 | public createdBy(ctx: Context) { 34 | return { connect: { id: ctx.meta.userId } } 35 | } 36 | 37 | public transformUpdateInput(data: GenericObject) { 38 | const keys = Object.keys(data) 39 | return keys.reduce((carry, key) => ({ ...carry, [key]: { set: data[key] } }), {}) as T 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /packages/backend/services/profile/profile-gql.svc.ts: -------------------------------------------------------------------------------- 1 | import { Profile } from '@prisma/client' 2 | import omit from 'lodash/omit' 3 | import { Event, Service } from 'moleculer-decorators' 4 | import { GraphQLService } from 'shared/core/graphql.svc' 5 | import { GraphQLMixin } from 'shared/mixins/graphql.mixin' 6 | 7 | @Service({ 8 | name: 'profile-gql', 9 | mixins: [ 10 | GraphQLMixin({ 11 | table: { 12 | name: 'Profile', 13 | primary: { 14 | id: 'Int', 15 | }, 16 | }, 17 | }), 18 | ], 19 | }) 20 | export default class ProfileGQLService extends GraphQLService { 21 | @Event({ 22 | name: 'auth.resolvedToken', 23 | }) 24 | async createUser(profile: Profile): Promise { 25 | const cacheId = `profile.${profile.userId}` 26 | this.logger.debug(`[resolvedToken] > ${profile.userId}`) 27 | const hasProfileInCached = await this.broker.cacher.get(cacheId).then((rtnPrf) => !!rtnPrf) 28 | if (hasProfileInCached) { 29 | return 30 | } 31 | const update = omit(profile, 'id', 'userId') 32 | const create = omit(profile, 'id') 33 | this.model 34 | .upsert({ 35 | where: { userId: profile.userId }, 36 | update, 37 | create, 38 | }) 39 | .then((rtnPrf: Profile) => { 40 | this.broker.cacher?.set(cacheId, rtnPrf, 3600 * 24 * 30) // cache in 30days 41 | return rtnPrf 42 | }) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /packages/docs/src/.vuepress/config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | module.exports = { 4 | title: 'Moleculer Boilerplate for Typescript', 5 | description: 6 | 'A Comprehensive Boilerplate for NodeJS micro-services project with `moleculerjs`.', 7 | 8 | markdown: { 9 | extendMarkdown: md => { 10 | md.use(require('markdown-it-task-lists')); 11 | } 12 | }, 13 | 14 | themeConfig: { 15 | // algolia: { 16 | // apiKey: 'ALGOLIA_API_KEY', 17 | // indexName: 'moleculerjs-boilerplate-ts', 18 | // }, 19 | sidebarDepth: 5, 20 | sidebar: { 21 | '/dev/': [ 22 | { 23 | title: 'Setup Environments', 24 | collapsable: false, 25 | children: ['prerequisite', 'source-code', 'unit-test'] 26 | } 27 | ], 28 | '/biz/': [ 29 | { 30 | title: 'Business', 31 | collapsable: false, 32 | children: [''] 33 | } 34 | ] 35 | }, 36 | nav: [ 37 | { text: '🏢 Home', link: '/' }, 38 | { text: '🛠 Development', link: '/dev/' }, 39 | { text: '💼 Business', link: '/biz/' }, 40 | { 41 | text: '💡 Stage', 42 | items: [ 43 | { text: 'Development', link: 'https://dev.ltv.vn' }, 44 | { text: 'Nightly', link: 'https://nightly.ltv.vn' }, 45 | { text: 'Staging', link: 'https://staging.ltv.vn' }, 46 | { text: 'Production', link: 'https://prod.ltv.vn' } 47 | ] 48 | }, 49 | { 
text: '🚀 Changelog 🚀', link: '/changelog/' } 50 | ] 51 | } 52 | }; 53 | -------------------------------------------------------------------------------- /packages/backend/tools/login-dev.js: -------------------------------------------------------------------------------- 1 | const { ServiceBroker } = require('moleculer') 2 | const ApiGateway = require('moleculer-web') 3 | const path = require('path') 4 | const fs = require('fs') 5 | 6 | const broker = new ServiceBroker() 7 | 8 | broker.createService({ 9 | name: 'www', 10 | mixins: [ApiGateway], 11 | 12 | settings: { 13 | port: 3003, 14 | path: '/', 15 | 16 | assets: { 17 | // Root folder of assets 18 | folder: path.join(__dirname, 'firebase'), 19 | // Options to `server-static` module 20 | options: {}, 21 | }, 22 | 23 | routes: [ 24 | { 25 | path: '/', 26 | autoAliases: true, 27 | whitelist: ['www.*'], 28 | }, 29 | ], 30 | }, 31 | 32 | actions: { 33 | appJS: { 34 | rest: 'GET /app.js', 35 | handler(ctx) { 36 | console.log('>> app.js') 37 | ctx.meta.$responseType = 'text/plain' 38 | const stream = fs.createReadStream(path.resolve(__dirname, 'firebase', 'app.js'), 'utf8') 39 | setTimeout(() => { 40 | stream.read(1024) 41 | }, 100) 42 | 43 | return stream 44 | }, 45 | }, 46 | }, 47 | }) 48 | 49 | const url = 'http://localhost:3003' 50 | 51 | broker 52 | .start() 53 | .then(() => console.log(`Open ${url} to login and claim the access token`)) 54 | .then(() => { 55 | const start = 56 | process.platform == 'darwin' ? 'open' : process.platform == 'win32' ? 'start' : 'xdg-open' 57 | require('child_process').exec(start + ' ' + url) 58 | }) 59 | // .then(() => broker.repl()) 60 | -------------------------------------------------------------------------------- /provision/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.6" 2 | services: 3 | postgres105: 4 | image: postgres:10.5 5 | container_name: postgres105 6 | hostname: postgres105 7 | restart: always 8 | volumes: 9 | - postgres105:/var/lib/postgresql/data 10 | ports: 11 | - 5432:5432 12 | networks: 13 | - mbt 14 | environment: 15 | POSTGRES_USER: mbt 16 | POSTGRES_PASSWORD: Mbt!@#456 17 | POSTGRES_DB: mbt 18 | 19 | pgadmin4: 20 | image: dpage/pgadmin4 21 | container_name: pgadmin4 22 | restart: always 23 | volumes: 24 | - ./data/pgadmin4:/var/lib/pgadmin 25 | ports: 26 | - 5433:80 27 | networks: 28 | - mbt 29 | environment: 30 | PGADMIN_DEFAULT_EMAIL: "admin@ltv.vn" 31 | PGADMIN_DEFAULT_PASSWORD: "Mbt!@#456" 32 | 33 | nats: 34 | image: nats:1.3.0-linux 35 | container_name: nats 36 | restart: always 37 | ports: 38 | - 4222:4222 39 | - 4444:4444 40 | - 6222:6222 41 | - 8222:8222 42 | networks: 43 | - mbt 44 | 45 | redis: 46 | image: redis:5-alpine 47 | container_name: redis5 48 | hostname: redis5 49 | restart: always 50 | ports: 51 | - 6379:6379 52 | networks: 53 | - mbt 54 | 55 | redis-commander: 56 | image: rediscommander/redis-commander 57 | container_name: redis-commander 58 | hostname: redis-commander 59 | restart: always 60 | ports: 61 | - 6380:8081 62 | networks: 63 | - mbt 64 | environment: 65 | REDIS_HOSTS: local:redis5:6379 66 | 67 | volumes: 68 | pgadmin4: 69 | postgres105: 70 | 71 | networks: 72 | mbt: 73 | external: true 74 | -------------------------------------------------------------------------------- /packages/backend/shared/core/graphql.svc.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/explicit-module-boundary-types, 
@typescript-eslint/no-explicit-any */ 2 | import { PrismaClient } from '.prisma/client' 3 | import isArray from 'lodash/isArray' 4 | import isFunction from 'lodash/isFunction' 5 | import isString from 'lodash/isString' 6 | import has from 'lodash/has' 7 | import lowerFirst from 'lodash/lowerFirst' 8 | import { BaseService } from 'shared/core/base.svc' 9 | 10 | export class GraphQLService extends BaseService { 11 | // database (S) 12 | public prisma!: PrismaClient 13 | public get model(): ModelDelegate { 14 | const name = this.settings.database?.table 15 | if (!name) { 16 | return null 17 | } 18 | if (!has(this.prisma, lowerFirst(name))) { 19 | return null 20 | } 21 | return (this.prisma as any)[lowerFirst(name)] 22 | } 23 | // database (E) 24 | 25 | // graphql (S) 26 | protected async transformPayload( 27 | responseKey: string | null, 28 | data: DataType | DataType[], 29 | options?: GraphQLTransformPayloadOptions, 30 | ): Promise | GraphQLPayload> { 31 | if (!data) { 32 | return null 33 | } 34 | 35 | if (isArray(data)) { 36 | const payload: GraphQLPayload = { 37 | nodes: data, 38 | } 39 | 40 | if (isFunction(options?.totalCount)) { 41 | payload.totalCount = await options.totalCount() 42 | } 43 | 44 | return payload 45 | } 46 | 47 | if (!isString(responseKey)) { 48 | throw new Error('Require response key when return an object. Ex: { user: userData }') 49 | } 50 | 51 | return { [responseKey]: data } 52 | } 53 | // graphql (E) 54 | } 55 | -------------------------------------------------------------------------------- /packages/backend/shared/mixins/config.mixin.ts: -------------------------------------------------------------------------------- 1 | import { Config } from '@prisma/client' 2 | import _ from 'lodash' 3 | import { Context, GenericObject, Utils } from 'moleculer' 4 | 5 | // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types 6 | export default function (keys: string[], opts?: GenericObject) { 7 | opts = _.defaultsDeep(opts, { 8 | propName: 'config', 9 | objPropName: 'configObj', 10 | configChanged: 'configChanged', 11 | serviceName: 'config', 12 | serviceVersion: 1, 13 | }) 14 | 15 | return { 16 | dependencies: [{ name: opts.serviceName, version: opts.serviceVersion }], 17 | 18 | events: { 19 | async 'config.changed'(ctx: Context) { 20 | this.logger.info('Configuration changed. Updating...') 21 | const changes = Array.isArray(ctx.params) ? 
ctx.params : [ctx.params] 22 | changes.forEach((item: Config) => { 23 | if (keys.some((key) => Utils.match(item.key, key))) { 24 | this[opts.propName][item.key] = item.value 25 | _.set(this[opts.objPropName], item.key, item.value) 26 | this.logger.debug('Configuration updated:', this[opts.propName]) 27 | 28 | if (_.isFunction(this[opts.configChanged])) { 29 | this[opts.configChanged].call(this, item.key, item.value, item) 30 | } 31 | } 32 | }) 33 | this.logger.info('Configuration changed.', this[opts.propName]) 34 | }, 35 | }, 36 | 37 | async started() { 38 | if (!_.isObject(this[opts.propName])) this[opts.propName] = {} 39 | if (!_.isObject(this[opts.objPropName])) this[opts.objPropName] = {} 40 | 41 | if (keys.length > 0) { 42 | const items = await this.broker.call('v1.config.get', { key: keys }) 43 | if (items) { 44 | items.forEach((item: Config) => { 45 | this[opts.propName][item.key] = item.value 46 | _.set(this[opts.objPropName], item.key, item.value) 47 | }) 48 | } 49 | } 50 | 51 | this.logger.debug('Configuration loaded:', this[opts.propName]) 52 | }, 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /packages/backend/tools/firebase/app.js: -------------------------------------------------------------------------------- 1 | const template = ` 2 |
3 |
4 |
5 | 6 |
7 |
8 | 9 |
10 | 11 | 12 |
13 | 14 |
15 |
Logged in with: {{ user.email }}, displayName: {{ user.displayName }}
16 | 17 |
18 | 19 |
20 |
21 | accessToken: 22 |
23 | {{ token }} 24 |
25 |
26 | ` 27 | 28 | window.onload = () => { 29 | const App = { 30 | template, 31 | setup() { 32 | const loading = Vue.ref(true) 33 | const token = Vue.ref() 34 | const form = Vue.ref({ 35 | email: 'admin@ltv.dev', 36 | password: 'Admin@1234', 37 | }) 38 | const user = Vue.ref({}) 39 | 40 | firebase.initializeApp({ 41 | apiKey: 'AIzaSyCEBdy3Yw6OupV6R7aUwUd0w5q9Cdr_hr4', 42 | projectId: 'moleculer-boilerplate-ts', 43 | }) 44 | firebase.auth().onAuthStateChanged(async (authenticatedUser) => { 45 | if (!authenticatedUser) { 46 | token.value = '' 47 | user.value = {} 48 | } 49 | const idToken = await authenticatedUser.getIdToken(true) 50 | token.value = idToken 51 | user.value = authenticatedUser 52 | }) 53 | 54 | Vue.onMounted(() => { 55 | loading.value = false 56 | }) 57 | 58 | const handleLogin = async () => { 59 | loading.value = true 60 | firebase 61 | .auth() 62 | .signInWithEmailAndPassword(form.value.email, form.value.password) 63 | .then(() => (loading.value = false)) 64 | .catch((error) => { 65 | console.error(error) 66 | }) 67 | } 68 | 69 | const handleLogout = async () => { 70 | firebase.auth().signOut() 71 | } 72 | 73 | return { token, user, form, handleLogin, handleLogout, loading } 74 | }, 75 | } 76 | 77 | Vue.createApp(App).mount('#app') 78 | } 79 | -------------------------------------------------------------------------------- /.devcontainer/provision/env/provision.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const fs = require('fs'); 3 | const config = require('./config.json'); 4 | const util = require('util'); 5 | 6 | const readFileAsync = util.promisify(fs.readFile); 7 | const writeFileAsync = util.promisify(fs.writeFile); 8 | 9 | const ENV_DIR = path.resolve(__dirname); 10 | const PROJECT_DIR = process.argv[2]; 11 | if (!PROJECT_DIR) { 12 | throw new Error('Please provide project directory. Ex: node provision.js PKG_DIR'); 13 | } 14 | const PKG_DIR = path.resolve(PROJECT_DIR, 'packages'); 15 | const allEnvFiles = fs.readdirSync(ENV_DIR).filter(f => f.endsWith('.env')); 16 | 17 | // Create default environment for each package 18 | function createEnv() { 19 | const { packages } = config; 20 | const pkgNames = Object.keys(packages); 21 | 22 | return Promise 23 | .all(pkgNames.map(pkgNm => getPkgEnv(pkgNm, packages[pkgNm]))) 24 | .then(pkgEnvContents => Promise.all(pkgEnvContents.map(pkg => createPkgEnv(pkg, { envFile: '.env.development' })))) 25 | } 26 | 27 | function getPkgEnv(pkgNm, { includes, excludes }) { 28 | includes = includes || []; 29 | excludes = excludes || []; 30 | 31 | if (includes.indexOf('*') !== -1) { 32 | includes = [...allEnvFiles]; 33 | } 34 | if (excludes.indexOf('*') !== -1) { 35 | return; 36 | } 37 | includes = includes.map(f => `${f}`.endsWith('.env') ? f : `${f}.env`); 38 | excludes = excludes.map(f => `${f}`.endsWith('.env') ? 
f : `${f}.env`); 39 | const envFiles = allEnvFiles.filter(f => includes.indexOf(f) !== -1 && excludes.indexOf(f) === -1); 40 | return Promise 41 | .all(envFiles.map(f => readFileAsync(path.resolve(ENV_DIR, f), { encoding: 'utf8' }))) 42 | .then(envContents => ({ name: pkgNm, content: envContents.join('\n') })); 43 | } 44 | 45 | function createPkgEnv(pkg, options = { envFile: '.env.development' }) { 46 | const writeToDir = path.resolve(PKG_DIR, pkg.name); 47 | const writeTo = path.resolve(writeToDir, options.envFile); 48 | if (!fs.existsSync(writeToDir)) { 49 | return Promise.resolve({ package: pkg.name, writeTo, status: '404' }); 50 | } 51 | return writeFileAsync(writeTo, pkg.content, { encoding: 'utf8' }) 52 | .then(() => ({ package: pkg.name, writeTo, status: 'OK' })); 53 | } 54 | 55 | 56 | createEnv().then((results) => console.log(results)); 57 | -------------------------------------------------------------------------------- /packages/backend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "backend", 3 | "version": "0.1.0", 4 | "description": "Moleculer boilerplate backend", 5 | "main": "index.js", 6 | "repository": "git@github.com:ltv/moleculer-boilerplate-ts.git", 7 | "author": "Luc ", 8 | "license": "MIT", 9 | "private": true, 10 | "workspaces": [ 11 | "services/**" 12 | ], 13 | "scripts": { 14 | "postinstall": "node tools/create-env.js", 15 | "serve": "export NODE_PATH=./ && node -r ts-node/register node_modules/.bin/moleculer-runner --hot --repl --envfile=local.env services --mask **/*.svc.ts --config shared/configs/moleculer.config.ts", 16 | "prisma": "prisma", 17 | "migrate": "yarn prisma migrate dev --schema shared/prisma/schema.prisma", 18 | "postmigrate": "yarn generate:graphql:prisma", 19 | "generate:graphql": "export NODE_PATH=./ && node -r ts-node/register ./tools/generate-graphql.ts", 20 | "generate:graphql:prisma": "yarn generate:graphql && yarn prisma generate --schema shared/prisma/schema.prisma", 21 | "lint": "eslint \"**/*.ts\" --resolve-plugins-relative-to .", 22 | "lint:fix": "eslint \"**/*.ts\" --fix --resolve-plugins-relative-to .", 23 | "prettier:write": "prettier --config .prettierrc --write {,*/**/,**/}*.ts", 24 | "build": "rm -rf dist && node build.js" 25 | }, 26 | "dependencies": { 27 | "@prisma/client": "^3.1.1", 28 | "graphql": "^15.6.0", 29 | "ioredis": "^4.27.9", 30 | "lodash": "^4.14.175", 31 | "moleculer": "^0.14.17", 32 | "moleculer-decorators": "^1.3.0", 33 | "nats": "^2.2.0", 34 | "pg": "^8.7.1", 35 | "postgraphile-core": "^4.12.1", 36 | "postgraphile-plugin-connection-filter": "^2.2.2", 37 | "redlock": "^4.2.0" 38 | }, 39 | "devDependencies": { 40 | "@ltv/env": "^1.2.0", 41 | "@types/lodash": "^4.14.175", 42 | "@types/node": "^16.10.2", 43 | "@types/pluralize": "^0.0.29", 44 | "@typescript-eslint/eslint-plugin": "^4.32.0", 45 | "@typescript-eslint/parser": "^4.32.0", 46 | "dotenv": "^10.0.0", 47 | "esbuild": "^0.13.3", 48 | "eslint": "^7.32.0", 49 | "eslint-config-prettier": "^8.3.0", 50 | "eslint-plugin-prettier": "^4.0.0", 51 | "moleculer": "^0.14.17", 52 | "moleculer-repl": "^0.6.6", 53 | "prettier": "^2.4.1", 54 | "prisma": "^3.1.1", 55 | "ts-node": "^10.2.1", 56 | "typescript": "^4.4.3" 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Moleculer Boilerplate for Typescript Projects 2 | 3 | A Comprehensive Boilerplate for NodeJS 
micro-services project with `moleculerjs`. 4 | 5 | ## Technical stack 6 | 7 | | Language / Framework | Version | Description | Author & Repository | 8 | | -------------------- | ------- | ------------------------------------------------------------------------------------------------ | ----------------------------------------------------------------- | 9 | | Typescript | 3.9 | TypeScript is a superset of JavaScript that compiles to clean JavaScript output. | [Typescript - Microsoft](https://github.com/microsoft/TypeScript) | 10 | | MoleculerJS | 0.14 | Progressive microservices framework for Node.js. | [Moleculer - Icebob](https://github.com/moleculerjs/moleculer) | 11 | | Mongoose | 5.9.25 | elegant mongodb object modeling for node.js Database | [Mongoose Official Page](https://mongoosejs.com/) | 12 | | VueJS | 3.0.0 | Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web | [VueJS - Evan You](https://github.com/vuejs/vue) | 13 | | VuePress | 1.3.0 | Minimalistic Vue-powered static site generator | [VuePress - VueJS Team](https://github.com/vuejs/vuepress) | 14 | | Helm Charts | 3.0.3 | The Kubernetes Package Manager | [Helm - Helm](https://github.com/helm/helm) | 15 | 16 | ## Features 17 | 18 | - [ ] Frontend with authenticated user 19 | - [ ] Simple Dashboard 20 | - [ ] User Profile 21 | - [ ] Login with Local Database & Social Provider 22 | - [ ] User Management 23 | - [ ] Backend with ApolloServer Federation 24 | - [ ] Services 25 | - [ ] Auth - For Authentication 26 | - [ ] Gateway - API Gateway with ApolloServer Federation 27 | - [ ] System - Logging & More 28 | - [ ] User - User Management 29 | - Packages 30 | - [ ] Docs - Document for setting up environment and more 31 | - [ ] Models - Sharing Definition Typed between Frontend & Backend 32 | -------------------------------------------------------------------------------- /packages/backend/shared/mixins/firebase.mixin.ts: -------------------------------------------------------------------------------- 1 | import { Profile } from '@prisma/client' 2 | import admin from 'firebase-admin' 3 | import { ClientRequest } from 'http' 4 | import { Context, Errors, GenericObject, ServiceSchema, ServiceSettingSchema } from 'moleculer' 5 | 6 | const { MoleculerError } = Errors 7 | type AuthRequest = ClientRequest & { headers: GenericObject } 8 | 9 | export const FirebaseAuthMixin: ServiceSchema = { 10 | name: null, 11 | settings: {}, 12 | actions: {}, 13 | methods: { 14 | init() { 15 | if (!admin.apps.length) { 16 | admin.initializeApp() 17 | } 18 | }, 19 | /** 20 | * Authenticate the request 21 | * 22 | * @param {Context} ctx 23 | * @param {Object} route 24 | * @param {IncomingRequest} req 25 | * @returns {Promise} 26 | */ 27 | async authenticate(ctx: Context,_: unknown,req: AuthRequest) { 28 | this.logger.info('Authenticating...') 29 | let token 30 | 31 | // Get JWT token from Authorization header 32 | const auth = req.headers['authorization'] 33 | if (auth && auth.startsWith('Bearer ')) { 34 | token = auth.slice(7) 35 | } 36 | 37 | if (!token) { 38 | throw new MoleculerError('Invalid Token', 401, 'INVALID_TOKEN') 39 | } 40 | 41 | const user: Profile = await this.verifyJWTToken(token) 42 | if (!user || !user.userId) { 43 | return 44 | } 45 | ctx.meta.token = token 46 | 47 | ctx.emit('auth.resolvedToken', user) 48 | this.setMeta(ctx, { userId: user.userId }) 49 | 50 | return ctx.meta.user 51 | }, 52 | 53 | async verifyJWTToken(token: string): Promise { 54 | try { 55 | const decoded: 
admin.auth.DecodedIdToken = await admin.auth().verifyIdToken(token) 56 | return { 57 | userId: decoded.uid, 58 | displayName: decoded.name, 59 | photoUrl: decoded.picture, 60 | email: decoded.email, 61 | } as Profile 62 | } catch (err) { 63 | this.logger.error(err.message) 64 | throw new MoleculerError('Token has expired', 401, 'TOKEN_EXPIRED') 65 | } 66 | }, 67 | 68 | setMeta(ctx: Context>, user: Profile) { 69 | ctx.meta.userId = user.userId 70 | ctx.meta.user = user 71 | }, 72 | }, 73 | created() { 74 | this.init() 75 | }, 76 | } 77 | -------------------------------------------------------------------------------- /packages/backend/tools/generate-graphql.ts: -------------------------------------------------------------------------------- 1 | import env from '@ltv/env' 2 | import fs from 'fs' 3 | import path from 'path' 4 | import { Client } from 'pg' 5 | import { generateSchemaFromDb } from 'shared/utils/graphql' 6 | 7 | const schemas = [env('DATABASE_SCHEMA', 'public')] 8 | const nodeEnv = env('NODE_ENV', 'development') 9 | const isProd = () => ['prod', 'production'].indexOf(nodeEnv) != -1 10 | const graphqlDir = path.resolve('./graphql') 11 | if (!fs.existsSync(graphqlDir)) { 12 | fs.mkdirSync(graphqlDir) 13 | } 14 | const schemaPath = path.resolve(graphqlDir, `schema${isProd() ? '' : '-dev'}.graphql`) 15 | if (fs.existsSync(schemaPath) && isProd()) { 16 | // Generated in the production 17 | process.exit() 18 | } 19 | 20 | const client = new Client({ 21 | host: env('DATABASE_HOST'), 22 | database: env('DATABASE_NAME'), 23 | user: env('DATABASE_USER'), 24 | password: env('DATABASE_PASS'), 25 | port: env.int('DATABASE_PORT', 5432), 26 | ssl: env.bool('DATABASE_SSL', false) 27 | }) 28 | 29 | const connect = () => client && client.connect() 30 | const disconnect = () => client && client.end() 31 | 32 | const getTableQuery = ` 33 | SELECT table_schema,table_name, obj_description(oid) as "comment" FROM information_schema.tables 34 | JOIN pg_class ON pg_class.relname = table_name 35 | WHERE "table_schema" IN (${schemas.map((s) => "'" + s + "'").join(',')}) 36 | ORDER BY table_schema,table_name; 37 | ` 38 | 39 | type Table = { 40 | table_schema: string 41 | table_name: string 42 | comment: string 43 | } 44 | 45 | async function omitAutoGeneratedTables() { 46 | const result = await client.query(getTableQuery) 47 | const tables = result.rows 48 | const omitTables: string[] = tables 49 | .filter((t: Table) => t.table_name.startsWith('_') && !`${t.comment}`.startsWith('@omit')) 50 | .map((t: Table) => `${t.table_schema}."${t.table_name}"`) 51 | 52 | if (omitTables.length > 0) { 53 | console.log('omit tables: ', omitTables.join(', ')) 54 | const queries = omitTables.map((t: string) => `comment on table ${t} is E'@omit';`) 55 | await Promise.all(queries.map((query) => client.query(query))) 56 | } 57 | } 58 | 59 | const run = async () => { 60 | await connect() 61 | const generateTo = schemaPath 62 | await omitAutoGeneratedTables() 63 | 64 | const schema = await generateSchemaFromDb(schemas) 65 | console.log(`🚀 Generated GraphQL Types from database to '${generateTo}'`) 66 | fs.writeFileSync(generateTo, schema) 67 | } 68 | 69 | run() 70 | .catch((e) => { 71 | throw e 72 | }) 73 | .finally(async () => { 74 | await disconnect() 75 | }) 76 | -------------------------------------------------------------------------------- /.devcontainer/scripts/install-dev-tools.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/zsh 2 | PROJECT_DIR=$(pwd) 3 | 
PROVISION_DIR=$PROJECT_DIR/.devcontainer/provision 4 | APP_DIR=$PROVISION_DIR/app 5 | ENV_DIR=$PROVISION_DIR/env 6 | 7 | # Create user bin 8 | mkdir -p ~/bin 9 | 10 | # Update pkg 11 | sudo apt-get update -y 12 | 13 | # Install pkg 14 | sudo apt-get install -y tig wget vim unzip curl awscli git openssl jq python3 ansible dnsutils 15 | 16 | # Install Terraform 17 | wget https://releases.hashicorp.com/terraform/0.12.25/terraform_0.12.25_linux_amd64.zip && 18 | unzip ./terraform_0.12.25_linux_amd64.zip -d $HOME/bin/ && 19 | rm -rf ./terraform_0.12.25_linux_amd64.zip 20 | 21 | # Install helm-diff plugin 22 | helm plugin install https://github.com/databus23/helm-diff --version master 23 | 24 | # Install helm-secrets plugin 25 | helm plugin install https://github.com/futuresimple/helm-secrets 26 | 27 | # Install Helmsman 28 | curl -L https://github.com/Praqma/helmsman/releases/download/v3.4.0/helmsman_3.4.0_linux_amd64.tar.gz | tar zx && 29 | chmod +x helmsman && 30 | mv ./helmsman $HOME/bin/helmsman 31 | 32 | # Install aws-iam-authenticator 33 | curl -o aws-iam-authenticator https://amazon-eks.s3-us-west-2.amazonaws.com/1.15.10/2020-02-22/bin/linux/amd64/aws-iam-authenticator && 34 | chmod +x ./aws-iam-authenticator && 35 | mv ./aws-iam-authenticator $HOME/bin/aws-iam-authenticator 36 | 37 | # Install doctl 38 | curl -OL https://github.com/digitalocean/doctl/releases/download/v1.45.1/doctl-1.45.1-linux-amd64.tar.gz && 39 | tar xf doctl-1.45.1-linux-amd64.tar.gz && 40 | mv ./doctl $HOME/bin/doctl && 41 | rm -rf doctl-1.45.1-linux-amd64.tar.gz 42 | 43 | # Install FiraCode fonts 44 | sh ./install-firacode.sh 45 | 46 | # Install oh-my-zsh 47 | mkdir -p ~/tmp && curl -o ~/tmp/install.sh https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh 48 | sh ~/tmp/install.sh --skip-chsh --unattended && rm ~/tmp/install.sh 49 | 50 | # Install zsh plugins 51 | mkdir -p ~/.zsh 52 | git clone https://github.com/zsh-users/zsh-autosuggestions ~/.zsh/zsh-autosuggestions 53 | git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ~/.zsh/zsh-syntax-highlighting 54 | echo "source ~/.zsh/zsh-autosuggestions/zsh-autosuggestions.zsh" >>${ZDOTDIR:-$HOME}/.zshrc 55 | echo "source ~/.zsh/zsh-syntax-highlighting/zsh-syntax-highlighting.zsh" >>${ZDOTDIR:-$HOME}/.zshrc 56 | 57 | # Install zsh theme 58 | git clone --depth=1 https://github.com/romkatv/powerlevel10k.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/themes/powerlevel10k 59 | ## powerlevel10k/powerlevel10k 60 | 61 | # Install vimrc 62 | git clone --depth=1 https://github.com/amix/vimrc.git ~/.vim_runtime 63 | sh ~/.vim_runtime/install_awesome_vimrc.sh 64 | 65 | # Install docker-compose 66 | sudo curl -L "https://github.com/docker/compose/releases/download/1.23.1/docker-compose-$(uname -s)-$(uname -m)" -o $HOME/bin/docker-compose 67 | sudo chmod +x $HOME/bin/docker-compose 68 | -------------------------------------------------------------------------------- /packages/backend/shared/mixins/apollo-server.mixin.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-explicit-any */ 2 | import env from '@ltv/env' 3 | import fs from 'fs' 4 | import { Kind } from 'graphql' 5 | import isString from 'lodash/isString' 6 | import { ApolloService, GraphQLUpload } from 'moleculer-apollo-server' 7 | import path from 'path' 8 | 9 | const nodeEnv = env('NODE_ENV', 'development') 10 | const isProd = () => ['prod', 'production'].indexOf(nodeEnv) != -1 11 | 12 | const schemaPath = 
path.resolve(`./graphql/schema${isProd() ? '' : '-dev'}.graphql`) 13 | 14 | if (!fs.existsSync(schemaPath)) { 15 | console.log('The graphql schema does not exists, please generate first') 16 | process.exit(1) 17 | } 18 | 19 | const schema = fs.readFileSync(schemaPath, { encoding: 'utf8' }) 20 | 21 | export const ApolloMixin = ApolloService({ 22 | // Global GraphQL typeDefs 23 | typeDefs: ` 24 | type AppVersion { 25 | dashboard: String 26 | api: String 27 | } 28 | 29 | scalar Date 30 | scalar Timestamp 31 | scalar Upload 32 | 33 | """ 34 | This type describes a File entity. 35 | """ 36 | type File { 37 | filename: String! 38 | encoding: String! 39 | mimetype: String! 40 | } 41 | 42 | """ 43 | This type describes a S3File entity. 44 | """ 45 | type S3File { 46 | ETag: String! 47 | Location: String! 48 | key: String! 49 | Key: String! 50 | Bucket: String! 51 | } 52 | 53 | ${schema} 54 | `, 55 | 56 | // Global resolvers 57 | resolvers: { 58 | Node: { 59 | __resolveType(obj: any) { 60 | return obj.__typename 61 | }, 62 | }, 63 | Date: { 64 | __parseValue(value: string | Date) { 65 | return new Date(value) // value from the client 66 | }, 67 | __serialize(value: any) { 68 | return isString(value) ? value : value.toISOString().split('T')[0] // value sent to the client 69 | }, 70 | __parseLiteral(ast: any) { 71 | if (ast.kind === Kind.INT) return parseInt(ast.value, 10) // ast value is always in string format 72 | 73 | return undefined 74 | }, 75 | }, 76 | Timestamp: { 77 | __parseValue(value: any) { 78 | return new Date(value) // value from the client 79 | }, 80 | __serialize(value: any) { 81 | return isString(value) ? value : value.toISOString() // value sent to the client 82 | }, 83 | __parseLiteral(ast: any) { 84 | if (ast.kind === Kind.INT) return parseInt(ast.value, 10) // ast value is always in string format 85 | 86 | return undefined 87 | }, 88 | }, 89 | Upload: GraphQLUpload, 90 | }, 91 | 92 | routeOptions: { 93 | path: '/graphql', 94 | authentication: true, 95 | cors: true, 96 | mappingPolicy: 'restrict', 97 | }, 98 | 99 | serverOptions: { 100 | playground: false, 101 | introspection: true, 102 | tracing: false, 103 | // engine: { 104 | // apiKey: env('APOLLO_KEY'), 105 | // reportSchema: true, 106 | // variant: 'current', 107 | // } as any, 108 | }, 109 | }) 110 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "App Dev Container", 3 | 4 | "forwardPorts": [3000, 6380], 5 | 6 | // Set *default* container specific settings.json values on container create. 
7 | "settings": { 8 | "terminal.integrated.shell.linux": "/usr/bin/zsh", 9 | "workbench.iconTheme": "material-icon-theme", 10 | "files.insertFinalNewline": true, 11 | "editor.renderWhitespace": "all", 12 | "editor.tabSize": 2, 13 | "editor.fontFamily": "MesloLGS NF", 14 | "workbench.colorTheme": "Night Owl", 15 | "gitlens.advanced.messages": { 16 | "suppressShowKeyBindingsNotice": true 17 | }, 18 | "gitlens.defaultDateFormat": "dddd, MMMM Do YYYY, h:mm:ss a", 19 | "gitlens.historyExplorer.enabled": false, 20 | "window.zoomLevel": 1, 21 | "prettier.singleQuote": true, 22 | "javascript.updateImportsOnFileMove.enabled": "always", 23 | "terminal.integrated.rendererType": "dom", 24 | "vetur.format.defaultFormatter.css": "prettier", 25 | "vetur.format.defaultFormatter.postcss": "prettier", 26 | "vetur.format.defaultFormatter.scss": "prettier", 27 | "vetur.format.defaultFormatter.less": "prettier", 28 | "vetur.format.defaultFormatter.stylus": "stylus-supremacy", 29 | "vetur.format.defaultFormatter.js": "prettier", 30 | "vetur.format.defaultFormatter.ts": "prettier", 31 | "material-icon-theme.activeIconPack": "react", 32 | "editor.formatOnSave": true, 33 | "[typescript]": { 34 | "editor.defaultFormatter": "vscode.typescript-language-features" 35 | }, 36 | "docker.groupImagesBy": "RepositoryName", 37 | "[javascript]": { 38 | "editor.defaultFormatter": "vscode.typescript-language-features" 39 | }, 40 | "[json]": { 41 | "editor.defaultFormatter": "esbenp.prettier-vscode" 42 | }, 43 | "[dockerfile]": { 44 | "editor.defaultFormatter": "ms-azuretools.vscode-docker" 45 | }, 46 | "typescript.updateImportsOnFileMove.enabled": "always", 47 | "[jsonc]": { 48 | "editor.defaultFormatter": "esbenp.prettier-vscode" 49 | }, 50 | "[markdown]": { 51 | "editor.defaultFormatter": "yzhang.markdown-all-in-one" 52 | }, 53 | "[typescriptreact]": { 54 | "editor.defaultFormatter": "esbenp.prettier-vscode" 55 | }, 56 | "[vue]": { 57 | "editor.defaultFormatter": "esbenp.prettier-vscode" 58 | } 59 | }, 60 | 61 | "postCreateCommand": "/bin/bash ./.devcontainer/scripts/install-dev-tools.sh > ~/post-create.log", 62 | 63 | // Add the IDs of extensions you want installed when the container is created. 
64 | "extensions": [ 65 | "eamodio.gitlens", 66 | "sdras.night-owl", 67 | "aaron-bond.better-comments", 68 | "ms-azuretools.vscode-docker", 69 | "msjsdiag.debugger-for-chrome", 70 | "coenraads.bracket-pair-colorizer", 71 | "mikestead.dotenv", 72 | "donjayamanne.githistory", 73 | "prisma.vscode-graphql", 74 | "wix.vscode-import-cost", 75 | "yzhang.markdown-all-in-one", 76 | "DavidAnson.vscode-markdownlint", 77 | "pkief.material-icon-theme", 78 | "whizkydee.material-palenight-theme", 79 | "esbenp.prettier-vscode", 80 | "foxundermoon.shell-format", 81 | "prisma.prisma", 82 | "wayou.vscode-todo-highlight", 83 | "ms-vscode.vscode-typescript-tslint-plugin", 84 | "octref.vetur", 85 | "hollowtree.vue-snippets", 86 | "redhat.vscode-yaml", 87 | "codezombiech.gitignore", 88 | "formulahendry.auto-close-tag", 89 | "formulahendry.auto-rename-tag", 90 | "joelday.docthis", 91 | "robinbentley.sass-indented", 92 | "hashicorp.terraform", 93 | "vscoss.vscode-ansible" 94 | ] 95 | } 96 | -------------------------------------------------------------------------------- /packages/backend/shared/utils/graphql.ts: -------------------------------------------------------------------------------- 1 | import { printSchema } from 'graphql/utilities' 2 | import _camelCase from 'lodash/camelCase' 3 | import _upperFirst from 'lodash/upperFirst' 4 | import { ActionParams } from 'moleculer' 5 | import plz from 'pluralize' 6 | import { createPostGraphileSchema, Plugin } from 'postgraphile-core' 7 | import ConnectionFilterPlugin from 'postgraphile-plugin-connection-filter' 8 | import { createPGPoolInstance } from 'shared/utils/db' 9 | 10 | export const constantCaseAll = (str: string): string => 11 | str 12 | .replace(/[^a-zA-Z0-9_]+/g, '_') 13 | .replace(/[A-Z]+/g, '_$&') 14 | .replace(/__+/g, '_') 15 | .replace(/^[^a-zA-Z0-9]+/, '') 16 | .replace(/^[0-9]/, '_$&') // GraphQL enums must not start with a number 17 | .toUpperCase() 18 | 19 | export const formatInsideUnderscores = 20 | (fn: (input: string) => string) => 21 | (str: string): string => { 22 | const matches = str.match(/^(_*)([\s\S]*?)(_*)$/) 23 | if (!matches) { 24 | throw new Error('Impossible?') // Satiate Flow 25 | } 26 | const [, start, middle, end] = matches 27 | return `${start}${fn(middle)}${end}` 28 | } 29 | 30 | export const upperFirst = formatInsideUnderscores(_upperFirst) 31 | export const camelCase = formatInsideUnderscores(_camelCase) 32 | export const constantCase = formatInsideUnderscores(constantCaseAll) 33 | export const upperCamelCase = (str: string): string => upperFirst(camelCase(str)) 34 | 35 | export const pluralize = (str: string): string => plz(str) 36 | export const singularize = (str: string): string => plz.singular(str) 37 | 38 | export default {} 39 | 40 | interface TypeTracer { 41 | [key: string]: boolean 42 | } 43 | 44 | async function generateGraphQLFromDbSchema(schemaName: string, typeTracer: TypeTracer) { 45 | const options = { commentDescriptions: true } 46 | const schema = await createPostGraphileSchema(createPGPoolInstance(), schemaName, { 47 | appendPlugins: [ConnectionFilterPlugin as Plugin], 48 | graphileBuildOptions: { connectionFilterAllowEmptyObjectInput: true }, 49 | }) 50 | const typeMaps: any = (schema as any)._typeMap 51 | const typeKeys: string[] = Object.keys(typeMaps).filter((t) => !t.startsWith('_')) 52 | 53 | typeKeys.forEach((type) => { 54 | if (typeTracer[type] || type.indexOf('Migration') !== -1) { 55 | delete (schema as any)._typeMap[type] 56 | } 57 | typeTracer[type] = true 58 | }) 59 | 60 | // remove unused 
types 61 | Object.keys(typeMaps) 62 | .filter((t) => t.startsWith('_')) 63 | .forEach((t) => delete (schema as any)._typeMap[t]) 64 | 65 | return printSchema(schema as any, options) 66 | } 67 | 68 | export async function generateSchemaFromDb( 69 | schemas: string[], 70 | typeTracer?: TypeTracer, 71 | ): Promise { 72 | typeTracer = { 73 | Query: true, 74 | Mutation: true, 75 | String: true, 76 | Boolean: true, 77 | ...(typeTracer || {}), 78 | } 79 | const generated: string[] = await Promise.all( 80 | schemas.map((dbSchema) => generateGraphQLFromDbSchema(dbSchema, typeTracer)), 81 | ) 82 | 83 | return generated.join('\n') 84 | } 85 | 86 | export function createGraphQLInputParams( 87 | props: ActionParams, 88 | options?: { optional?: boolean; key?: string }, 89 | ): ActionParams { 90 | options = options || { optional: false } 91 | 92 | const { optional, key } = options 93 | props = key ? { props: { key: { type: 'object', props } } } : props 94 | return { 95 | input: { 96 | type: 'object', 97 | optional, 98 | props, 99 | }, 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /packages/backend/build.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const fs = require('fs') 3 | const esbuild = require('esbuild') 4 | const uniq = require('lodash/uniq') 5 | const merge = require('lodash/merge') 6 | const outDir = path.resolve('dist') 7 | const pkg = require('./package.json') 8 | 9 | const [, , svcName] = process.argv 10 | 11 | function walkSync(dir, fileList = []) { 12 | fs.readdirSync(dir).forEach((file) => { 13 | const dirFile = path.join(dir, file) 14 | try { 15 | fileList = walkSync(dirFile, fileList) 16 | } catch (err) { 17 | if (err.code === 'ENOTDIR' || err.code === 'EBUSY') fileList = [...fileList, dirFile] 18 | else throw err 19 | } 20 | }) 21 | return fileList 22 | } 23 | 24 | function getEntries({ config }) { 25 | const svcFilter = (file) => 26 | svcName 27 | ? 
file.startsWith(`services/${svcName}`) && file.match(/.*\.svc\.ts$/) 28 | : file.match(/.*\.svc\.ts$/) 29 | return walkSync('./services') 30 | .filter(svcFilter) 31 | .map((file) => { 32 | const [, svc, filename] = file.substring(0, file.length - 3).split('/') 33 | return { 34 | name: `${svc}/services/${filename}`, 35 | path: `./${file}`, 36 | cfg: `${svc}/${config.key}`, 37 | gql: `${svc}/tools/generate-graphql`, 38 | } 39 | }) 40 | .reduce( 41 | (memo, { name, path, cfg, gql }) => ({ 42 | ...memo, 43 | [name]: path, 44 | [cfg]: config.path, 45 | [gql]: 'tools/generate-graphql.ts', 46 | }), 47 | {}, 48 | ) 49 | } 50 | 51 | const copyFileAsync = (from, to) => 52 | new Promise((resolve, reject) => { 53 | fs.mkdirSync(path.resolve(outDir, path.dirname(to)), { recursive: true }) 54 | fs.copyFile(path.resolve(from), path.resolve(outDir, to), (err) => { 55 | if (!err) { 56 | return reject(err) 57 | } 58 | return resolve(true) 59 | }) 60 | }) 61 | 62 | const writeFileSync = (to, data) => 63 | new Promise((resolve, reject) => 64 | fs.writeFile( 65 | path.resolve(outDir, to), 66 | JSON.stringify(data, null, 2), 67 | { encoding: 'utf8' }, 68 | (err) => { 69 | if (!err) { 70 | return reject(err) 71 | } 72 | return resolve(true) 73 | }, 74 | ), 75 | ) 76 | 77 | const filter = (x) => ['.bin', '@svc'].indexOf(x) === -1 78 | 79 | let nodeModules = fs.readdirSync('node_modules').filter(filter) 80 | if (fs.existsSync('../../node_modules')) { 81 | nodeModules = nodeModules.concat(fs.readdirSync('../../node_modules').filter(filter)) 82 | } 83 | 84 | console.time('Build Production') 85 | ;(async () => { 86 | const buildOptions = { 87 | color: true, 88 | minify: false, 89 | bundle: true, 90 | sourcemap: false, 91 | platform: 'node', 92 | tsconfig: './tsconfig.json', 93 | logLevel: 'error', 94 | external: nodeModules, 95 | } 96 | 97 | try { 98 | const entries = getEntries({ 99 | config: { key: 'moleculer.config', path: 'shared/configs/moleculer.config.ts' }, 100 | }) 101 | const names = Object.keys(entries) 102 | const entriesBuild = names.map((fileName) => 103 | esbuild.build({ 104 | entryPoints: [entries[fileName]], 105 | outfile: `${outDir}/${fileName}.js`, 106 | ...buildOptions, 107 | }), 108 | ) 109 | const uniqServices = uniq(names.map((f) => f.split('/')[0])) 110 | await Promise.all(entriesBuild) 111 | await Promise.all([ 112 | ...uniqServices.map((svc) => { 113 | const svcPkg = require(`./services/${svc}/package.json`) 114 | svcPkg.dependencies = merge(svcPkg.dependencies, pkg.dependencies) 115 | svcPkg.scripts = { 116 | prestart: 'node tools/generate-graphql.js && npx prisma generate', 117 | start: 'moleculer-runner --repl --mask *.svc.js --config moleculer.config.js services', 118 | } 119 | return writeFileSync(`${svc}/package.json`, svcPkg) 120 | }), 121 | ...uniqServices.map((svc) => 122 | copyFileAsync('shared/prisma/schema.prisma', `${svc}/prisma/schema.prisma`), 123 | ), 124 | ]) 125 | } catch (e) { 126 | console.error(e) 127 | } 128 | })() 129 | 130 | console.timeEnd('Build Production') 131 | -------------------------------------------------------------------------------- /packages/backend/shared/configs/moleculer.config.ts: -------------------------------------------------------------------------------- 1 | // import { CreateHealthCheckMiddleware } from 'middlewares/health-check.middleware' 2 | import EnvMiddleware from 'shared/middlewares/env.middleware' 3 | import { BrokerOptions, LoggerConfig } from 'moleculer' 4 | import os from 'os' 5 | 6 | const nodeIDPrefix = ((nodeID: string) => { 7 | 
if (!nodeID) { 8 | return '' 9 | } 10 | return `-${nodeID}` 11 | })(process.env.NODE_ID) 12 | const osHostName = os.hostname().toLowerCase() 13 | 14 | // HeathCheck (S) 15 | const { HEALTH_CHECK_READINESS_PATH, HEALTH_CHECK_LIVENESS_PATH, HEALTH_CHECK_PORT } = process.env 16 | const healthCheckOpts = { 17 | port: +HEALTH_CHECK_PORT || 3001, 18 | readiness: { 19 | path: HEALTH_CHECK_READINESS_PATH || '/ready', 20 | }, 21 | liveness: { 22 | path: HEALTH_CHECK_LIVENESS_PATH || '/live', 23 | }, 24 | } 25 | // HeathCheck (E) 26 | 27 | // Transporter (S) 28 | const { TRANSPORTER_URL, NATS_USER, NATS_PASSWORD } = process.env 29 | const transporterOpts = { 30 | type: 'NATS', 31 | options: { 32 | url: TRANSPORTER_URL, 33 | user: NATS_USER, 34 | pass: NATS_PASSWORD, 35 | }, 36 | } 37 | const transporter = NATS_USER ? transporterOpts : TRANSPORTER_URL 38 | // Transporter (E) 39 | 40 | // Redis (S) 41 | const { REDIS_HOST, REDIS_PORT, REDIS_PASSWORD, REDIS_CLUSTER } = process.env 42 | const redis = { 43 | host: REDIS_HOST, 44 | port: +REDIS_PORT, 45 | password: REDIS_PASSWORD, 46 | } 47 | const hosts = REDIS_CLUSTER ? REDIS_CLUSTER.split(',') : [] 48 | const cluster: any = { 49 | nodes: hosts.map((h: string) => { 50 | const [host, port] = h.split(':') 51 | return { host, port } 52 | }), 53 | options: { 54 | password: REDIS_PASSWORD, 55 | }, 56 | } 57 | // Redis (E) 58 | 59 | // Cacher (S) 60 | const cacher: any = { 61 | type: 'Redis', 62 | options: { 63 | // Prefix for keys 64 | prefix: process.env.REDIS_PREFIX || 'CK', 65 | // set Time-to-live to 30sec. 66 | ttl: 30, 67 | }, 68 | } 69 | if (hosts.length) { 70 | cacher.options.cluster = cluster 71 | } else { 72 | cacher.options.redis = redis 73 | } 74 | // Cacher (E) 75 | 76 | // Metrics (S) 77 | const { METRIC_PORT, METRIC_PATH } = process.env 78 | const metrics = { 79 | enabled: true, 80 | reporter: [ 81 | { 82 | type: 'Prometheus', 83 | options: { 84 | // HTTP port 85 | port: +METRIC_PORT || 3030, 86 | // HTTP URL path 87 | path: METRIC_PATH || '/metrics', 88 | // Default labels which are appended to all metrics labels 89 | // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types 90 | defaultLabels: (registry: any) => ({ 91 | namespace: registry.broker.namespace, 92 | nodeID: registry.broker.nodeID, 93 | }), 94 | }, 95 | }, 96 | ], 97 | } 98 | // Metrics (E) 99 | 100 | // LOGGER (S) 101 | const { LOGGER_TYPE, LOGGER_LEVEL } = process.env 102 | const logger: LoggerConfig = { 103 | type: LOGGER_TYPE || 'Console', 104 | options: { 105 | level: LOGGER_LEVEL || 'info', 106 | color: true, 107 | moduleColors: true, 108 | // autoPadding: true 109 | // formatter: 'short', 110 | // objectPrinter: (o: any) => inspect(o, { depth: 4, colors: true, breakLength: 100 }) 111 | }, 112 | } 113 | // LOGGER (E) 114 | 115 | const brokerConfig: BrokerOptions = { 116 | namespace: process.env.NAMESPACE || '', 117 | nodeID: `${osHostName}${nodeIDPrefix}`, 118 | 119 | // logFormatter: 'full', 120 | logger, 121 | 122 | transporter, 123 | 124 | cacher, 125 | 126 | serializer: 'JSON', 127 | 128 | requestTimeout: 900 * 1000, 129 | retryPolicy: { 130 | enabled: false, 131 | retries: 5, 132 | delay: 100, 133 | maxDelay: 1000, 134 | factor: 2, 135 | }, 136 | 137 | maxCallLevel: 100, 138 | heartbeatInterval: 5, 139 | heartbeatTimeout: 15, 140 | 141 | tracking: { 142 | enabled: false, 143 | shutdownTimeout: 5000, 144 | }, 145 | 146 | disableBalancer: false, 147 | 148 | registry: { 149 | strategy: 'RoundRobin', 150 | preferLocal: true, 151 | }, 152 | 153 | 
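  // Circuit breaker (disabled here): when enabled, Moleculer stops routing calls to an action
  // once its error rate within `windowTime` seconds exceeds `threshold` (after at least
  // `minRequestCount` requests), and probes it again after `halfOpenTime` ms.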
circuitBreaker: { 154 | enabled: false, 155 | threshold: 0.5, 156 | windowTime: 60, 157 | minRequestCount: 20, 158 | halfOpenTime: 10 * 1000, 159 | }, 160 | 161 | bulkhead: { 162 | enabled: false, 163 | concurrency: 10, 164 | maxQueueSize: 100, 165 | }, 166 | 167 | validator: true, 168 | 169 | metrics, 170 | 171 | tracing: { 172 | enabled: true, 173 | exporter: { 174 | type: 'Console', 175 | options: {}, 176 | }, 177 | }, 178 | 179 | // Register custom middlewares 180 | middlewares: [EnvMiddleware, /*CreateHealthCheckMiddleware(healthCheckOpts)*/], 181 | 182 | replCommands: null, 183 | } 184 | 185 | export = brokerConfig 186 | -------------------------------------------------------------------------------- /packages/docs/src/dev/source-code.md: -------------------------------------------------------------------------------- 1 | # Source code 2 | 3 | Include FrontEnd, BackEnd, Docs, Database 4 | 5 | ## Repository 6 | 7 | Please login into `gitlab` (send email to `luc@ltv.vn` to add account to projects) 8 | 9 | - [gitlab](git@gitlab.com:ltv/ltv-coffee/ltv-coffee.git) 10 | 11 | ### Setup push code by ssh key 12 | 13 | **Benefit**: Push code to `source respository` without typing password 14 | 15 | Steps: 16 | 17 | - Generate ssh key if you don't have one: [generate ssh](https://gitlab.com/help/ssh/README#generating-a-new-ssh-key-pair) 18 | 19 | - If you have exited ssh key: [existing ssh](https://gitlab.com/help/ssh/README#locating-an-existing-ssh-key-pair) 20 | 21 | - Then add to ssh manager on you account in `soure respository`: [ssh manager](https://gitlab.com/profile/keys) 22 | 23 | ## ⚒️ LTV Coffee Backend 24 | 25 | Backend with micro-services framework 26 | 27 | Path: `$project/packages/server` 28 | 29 | [AIS Postman](https://www.getpostman.com/collections/aeac4d071787299a0a68) 30 | 31 | ### Prerequisite 32 | 33 | ++ On Linux, run install script 34 | 35 | ```sh 36 | # Install docker 🐳 37 | curl -fsSL get.docker.com -o get-docker.sh 38 | sudo sh get-docker.sh 39 | docker -v 40 | 41 | # Install docker-compose 42 | sudo curl -L https://github.com/docker/compose/releases/download/1.22.0/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose 43 | sudo chmod +x /usr/local/bin/docker-compose 44 | docker-compose -v 45 | 46 | sudo sed -i '/^ExecStart.*$/c\ExecStart=/usr/bin/dockerd -H unix:// -H tcp://0.0.0.0:6513' /lib/systemd/system/docker.service 47 | # Restart docker daemon 48 | sudo systemctl daemon-reload 49 | sudo systemctl restart docker 50 | 51 | sudo gpasswd -a $USER docker 52 | echo "Please restart to apply change (y/Y)" && read 53 | 54 | ``` 55 | 56 | ++ On Mac/Windows, follow below guide 57 | 58 | ```sh 59 | + Docker for Mac: https://docs.docker.com/docker-for-mac/install/ 60 | + Docker for Windows: https://docs.docker.com/docker-for-windows/install/ 61 | ``` 62 | 63 | ```sh 64 | sudo ln -s /mnt/c/Program\ Files/Docker/Docker/resources/bin/docker.exe /usr/local/bin/docker 65 | sudo ln -s /mnt/c/Program\ Files/Docker/Docker/resources/bin/docker-compose.exe /usr/local/bin/docker-compose 66 | ``` 67 | 68 | [Reference](https://nickjanetakis.com/blog/setting-up-docker-for-windows-and-wsl-to-work-flawlessly) 69 | 70 | ### Start Development 71 | 72 | - In `server` 73 | 74 | ++ Add .env.environment 75 | 76 | ```sh 77 | # molculer config 78 | GATEWAY_PORT=3000 79 | JWT_SECRET=wMFgWrYwP19Red9dpn43hNMvSoaJrCMn 80 | TRANSPORTER_URL=nats://localhost:4222 81 | DATABASE_HOST=210.245.33.182 82 | DATABASE_NAME=ltvcoffee 83 | DATABASE_USER=ltvcoffee 84 | DATABASE_PASS=Ltv!@#123 85 | 
DATABASE_PORT=5432 86 | DATABASE_POOL_MIN=1 87 | DATABASE_POOL_MAX=1 88 | 89 | DB_REDIS_HOST=localhost 90 | DB_REDIS_PORT=6379 91 | WAIT_SERVICE_TIMEOUT=30 92 | SALT_ROUNDS=10 93 | APOLLO_ENGINE_KEY=service:ltv-coffee:OVXTgceZDzivrKVdC61d2g 94 | HASH_SECRET=H3GmHGm7eeTFjIBVg2DJz7HM5uin2uYu 95 | ``` 96 | 97 | ```sh 98 | # Project setup 99 | yarn 100 | 101 | # Compiles and hot-reloads for development 102 | yarn serve 103 | ``` 104 | 105 | After successfully start server, we have: 106 | 107 | | | Link | 108 | | -------------------------- | ---------------------------------------------------- | 109 | | **Server** | 110 | | + API Gateway | [GraphQL](http://localhost:3000/graphql) | 111 | | | 112 | | **Tools** | 113 | | + Postgres Admin dashboard | [Postgres Admin](http://localhost:5433) | 114 | | + Redis Admin Dashboard | [Redis Admin](http://localhost:6380) | 115 | | | 116 | | **External Services** | 117 | | + PostgresDB | running on port 5432 | 118 | | + Redis | running on port 6379 | 119 | | + NATS | running on port 4222(together with 4444, 6222, 8222) | 120 | 121 | ### 📒 Notes 122 | 123 | #### Database Migration 124 | 125 | ```sh 126 | # Review knex cmd 127 | knex --help 128 | 129 | # Create new migration file 130 | - At service folder 131 | - Use "knex" cmd, to create new migration file 132 | - Modify created file to update database schema 133 | 134 | $ knex migrate:make [FILE_NAME] 135 | 136 | # On start, service will auto run migration 137 | ``` 138 | 139 | #### Postgres Admin Dashboard 140 | 141 | ```sh 142 | # After successfully run docker to setup environemnt 143 | # pgAdmin 4 run on port 5433, open browser: 144 | http://localhost:5433 145 | 146 | # Login info 147 | + user: admin@ltv.vn 148 | + pass: Ltv!@#123 149 | 150 | # Add "new server" with config 151 | # + localhost 152 | | | Key | Value | 153 | | --- | ------------- | ------------ | 154 | | 1 | Connection | postgres10.5 | 155 | | 2 | Port | 5432 | 156 | | 3 | Database name | ltvcoffee | 157 | | 4 | User | ltvcoffee | 158 | | 5 | Pass | Ltv!@#123 | 159 | 160 | # + coffee server 161 | # TODO: UPDATE CONFIG 162 | | | Key | Value | 163 | | --- | ------------- | ---------- | 164 | | 1 | Connection | ltv.coffee | 165 | | 2 | Port | 5432 | 166 | | 3 | Database name | postgres | 167 | | 4 | User | postgres | 168 | | 5 | Pass | postgres | 169 | ``` 170 | 171 | - Screenshot 172 | 173 | ![pgAdmin4](/docs/images/pgadmin-4-2018-10-23_153736.png) 174 | 175 | #### Built-in GraphQL 176 | 177 | GraphQL's schema on services will be merged by API-gateway to create GraphQL playground [http://localhost:3000/graphql](http://localhost:3000/graphql). 
Run & test query with GraphQL's playground 178 | 179 | ![GraphQL's playground](/docs/images/GraphQL.png) 180 | 181 | ## ⚒️ LTV Coffee Frontend 182 | 183 | Frontend with VUE framework 184 | 185 | Path: `$project/packages/client` 186 | -------------------------------------------------------------------------------- /packages/backend/services/core/config.svc.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-explicit-any */ 2 | import { Config, GlobalReject, Prisma } from '@prisma/client' 3 | import flattenDeep from 'lodash/flattenDeep' 4 | import isEqual from 'lodash/isEqual' 5 | import uniqBy from 'lodash/uniqBy' 6 | import { Context, Errors, Utils } from 'moleculer' 7 | import { Action, Service } from 'moleculer-decorators' 8 | import { PrismaService } from 'shared/core/prisma.svc' 9 | import { PrismaMixin } from 'shared/mixins/prisma.mixin' 10 | 11 | type ModelDelegate = Prisma.ConfigDelegate 12 | 13 | const { ValidationError } = Errors 14 | 15 | @Service({ 16 | name: 'config', 17 | version: 1, 18 | mixins: [ 19 | PrismaMixin({ 20 | prisma: { 21 | model: 'Config', 22 | }, 23 | }), 24 | ], 25 | settings: { 26 | defaultConfig: { 27 | 'mail.enabled': 'false', 28 | 'mail.from': 'no-reply@ltv.dev', 29 | }, 30 | }, 31 | }) 32 | export default class ConfigService extends PrismaService { 33 | // ACTIONS (S) 34 | /** 35 | * Get configurations by key or keys 36 | * 37 | * @actions 38 | * @param {String|Array} key 39 | * @returns {Object|Array} 40 | */ 41 | 42 | @Action({ 43 | name: 'get', 44 | cache: { 45 | keys: ['key'], 46 | }, 47 | }) 48 | actGet(ctx: Context): Promise { 49 | if (ctx.params.key == null) 50 | throw new ValidationError("Param 'key' must be defined.", 'ERR_KEY_NOT_DEFINED') 51 | return this.get(ctx.params.key) 52 | } 53 | 54 | /** 55 | * Set configuration values by keys 56 | * 57 | * @actions 58 | * @param {String} key 59 | * @param {any} key 60 | * @returns {Object|Array} 61 | */ 62 | @Action({ 63 | name: 'set', 64 | }) 65 | async actSet(ctx: Context): Promise { 66 | if (Array.isArray(ctx.params)) { 67 | return Promise.all( 68 | ctx.params.map(async (p: Config) => { 69 | const { changed, item } = await this.set(p.key, p.value) 70 | if (changed) ctx.broker.broadcast(`config.changed`, item) 71 | 72 | return item 73 | }), 74 | ) 75 | } else { 76 | const { changed, item } = await this.set(ctx.params.key, ctx.params.value) 77 | if (changed) ctx.broker.broadcast(`config.changed`, item) 78 | 79 | return item 80 | } 81 | } 82 | 83 | @Action({ cache: true }) 84 | all(): Promise { 85 | return this.model.findMany({}) 86 | } 87 | // ACTIONS (E) 88 | 89 | // METHODS (S) 90 | /** 91 | * Get configurations by key. 92 | * 93 | * @methods 94 | * @param {String|Array} key Config key 95 | * @returns {Object|Array} 96 | */ 97 | async get(key: string | string[]): Promise { 98 | if (Array.isArray(key)) { 99 | const res = await Promise.all(key.map((k) => this.getByMask(k))) 100 | return uniqBy(flattenDeep(res), (item) => item.key) 101 | } 102 | 103 | if (key.indexOf('*') == -1 && key.indexOf('?') == -1) { 104 | return this.model.findUnique({ where: { key } }) 105 | } 106 | 107 | return this.getByMask(key) 108 | } 109 | 110 | /** 111 | * Get configurations by key mask. 
112 | * 113 | * @methods 114 | * @param {String} mask Key mask 115 | * @returns {Array} 116 | */ 117 | async getByMask(mask: string): Promise { 118 | const allItems: Config[] = await this.broker.call(`${this.fullName}.all`) 119 | 120 | /* istanbul ignore next */ 121 | if (!allItems) return [] 122 | 123 | return allItems.filter((item) => Utils.match(item.key, mask)) 124 | } 125 | 126 | /** 127 | * Check whether a configuration key exists. 128 | * 129 | * @methods 130 | * @param {String} key 131 | * @returns {Boolean} 132 | */ 133 | has(key: string): Promise { 134 | return this.model.findUnique({ where: { key } }).then((res) => !!res) 135 | } 136 | 137 | /** 138 | * Set a configuration value. 139 | * 140 | * @methods 141 | * @param {String} key Key 142 | * @param {any} value Value 143 | * @param {Boolean} isDefault 144 | * 145 | * @returns {Object} 146 | */ 147 | // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types 148 | async set(key: string, value: string, isDefault = false) { 149 | const item = await this.model.findUnique({ where: { key } }) 150 | if (item != null) { 151 | if (!isEqual(item.value, value)) { 152 | // Modify 153 | return { 154 | item: await this.model.update({ where: { key }, data: { value: { set: value } } }), 155 | changed: true, 156 | } 157 | } 158 | 159 | // No changes 160 | return { 161 | item, 162 | changed: false, 163 | } 164 | } 165 | 166 | // Create new 167 | return { 168 | item: await this.model.create({ data: { key, value, isDefault } }), 169 | changed: true, 170 | new: true, 171 | } 172 | } 173 | 174 | /** 175 | * Run configuration migration. Add missing keys. 176 | * 177 | * @methods 178 | * @private 179 | */ 180 | // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types 181 | migrateConfig() { 182 | return Promise.all( 183 | Object.keys(this.settings.defaultConfig).map(async (key) => { 184 | const value = this.settings.defaultConfig[key] 185 | const item = (await this.get(key)) as Config // in this case, alway return single record 186 | if (!item) { 187 | this.logger.info(`Save new config: "${key}" =`, value) 188 | return this.set(key, value, true) 189 | } else if (item.isDefault && !isEqual(item.value, value)) { 190 | this.logger.info(`Update default config: "${key}" =`, value) 191 | return this.set(key, value, true) 192 | } 193 | }), 194 | ) 195 | } 196 | // METHODS (E) 197 | 198 | // HOOKS (S) 199 | public started(): void { 200 | this.migrateConfig().then(() => this.logger.info('Updated default configs')) 201 | } 202 | // HOOKS (E) 203 | } 204 | -------------------------------------------------------------------------------- /packages/docs/src/dev/unit-test.md: -------------------------------------------------------------------------------- 1 | # Unit Test 2 | 3 | Jest follows the #0CJS practices for Zero Configuration, where even though it is extendible with many configuration variables, it just works out of the box and you don’t need to configure anything special. 4 | 5 | Jest acts as a **test runner**, **assertion library**, and **mocking library**. 6 | 7 | ## Setup 8 | 9 | ### Creating a test file 10 | 11 | Jest will [look for tests][6] in any of the following places: 12 | 13 | - Files with `.js` suffix in `__tests__` folders. 14 | - Files with `.test.js` suffix. 15 | - Files with `.spec.js` suffix. 
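Before wiring anything else up, you can double-check which files Jest will treat as tests in this repository; `--listTests` prints the resolved test files without running them (this assumes the `"test": "jest"` script shown further down this page):

```sh
# Print the test files Jest would run, without executing them
yarn test --listTests
```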
16 | 17 | #### Sample test 18 | 19 | For simple: 20 | 21 | ```js 22 | it('should render correctly with no props', () _=>_ { 23 | const component = shallow(); 24 | expect(component).toMatchSnapshot(); 25 | }); 26 | 27 | 28 | it('should render banner text correctly with given strings', () _=>_ { 29 | const strings = ['one', 'two']; 30 | const component = shallow(); 31 | expect(component).toMatchSnapshot(); 32 | }); 33 | ``` 34 | 35 | ### Running test 36 | 37 | ```sh 38 | # Test all cases 39 | yarn test 40 | 41 | # Test on single directory 42 | yarn test [PATH TO DIR] 43 | 44 | # Test on single file 45 | yarn test [PATH TO FILE] 46 | ``` 47 | 48 | ## Details 49 | 50 | Install Jest using [`yarn`](https://yarnpkg.com/en/package/jest): 51 | 52 | ```bash 53 | yarn add --dev jest 54 | ``` 55 | 56 | Or [`npm`](https://www.npmjs.com/): 57 | 58 | ```bash 59 | npm install --save-dev jest 60 | ``` 61 | 62 | Note: Jest documentation uses `yarn` commands, but `npm` will also work. You can compare `yarn` and `npm` commands in the [yarn docs, here](https://yarnpkg.com/en/docs/migrating-from-npm#toc-cli-commands-comparison). 63 | 64 | Let's get started by writing a test for a hypothetical function that adds two numbers. First, create a `sum.js` file: 65 | 66 | ```javascript 67 | function sum(a, b) { 68 | return a + b; 69 | } 70 | module.exports = sum; 71 | ``` 72 | 73 | Then, create a file named `sum.test.js`. This will contain our actual test: 74 | 75 | ```javascript 76 | const sum = require('./sum'); 77 | 78 | test('adds 1 + 2 to equal 3', () => { 79 | expect(sum(1, 2)).toBe(3); 80 | }); 81 | ``` 82 | 83 | Add the following section to your `package.json`: 84 | 85 | ```json 86 | { 87 | "scripts": { 88 | "test": "jest" 89 | } 90 | } 91 | ``` 92 | 93 | Finally, run `yarn test` or `npm run test` and Jest will print this message: 94 | 95 | ```bash 96 | PASS ./sum.test.js 97 | ✓ adds 1 + 2 to equal 3 (5ms) 98 | ``` 99 | 100 | **You just successfully wrote your first test using Jest!** 101 | 102 | This test used `expect` and `toBe` to test that two values were exactly identical. To learn about the other things that Jest can test, see [Using Matchers](UsingMatchers.md). 103 | 104 | ### Running from command line 105 | 106 | You can run Jest directly from the CLI (if it's globally available in your `PATH`, e.g. by `yarn global add jest` or `npm install jest --global`) with a variety of useful options. 107 | 108 | Here's how to run Jest on files matching `my-test`, using `config.json` as a configuration file and display a native OS notification after the run: 109 | 110 | ```bash 111 | jest my-test --notify --config=config.json 112 | ``` 113 | 114 | If you'd like to learn more about running `jest` through the command line, take a look at the [Jest CLI Options](CLI.md) page. 
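Two more flags worth knowing while developing (both are standard Jest CLI options):

```sh
# Re-run the tests related to files you change, on every save
jest --watch

# Run the whole suite once and print a coverage report
jest --coverage
```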
115 | 116 | ### Additional Configuration 117 | 118 | #### Generate a basic configuration file 119 | 120 | Based on your project, Jest will ask you a few questions and will create a basic configuration file with a short description for each option: 121 | 122 | ```bash 123 | jest --init 124 | ``` 125 | 126 | #### Using Babel 127 | 128 | To use [Babel](http://babeljs.io/), install the `babel-jest` and `regenerator-runtime` packages via `yarn`: 129 | 130 | ```bash 131 | yarn add --dev babel-jest babel-core regenerator-runtime 132 | ``` 133 | 134 | Or `npm`: 135 | 136 | ```bash 137 | npm install --save-dev babel-jest babel-core regenerator-runtime 138 | ``` 139 | 140 | > Note: If you are using Babel version 7 then you need to install `babel-jest`, `babel-core@^7.0.0-bridge.0` and `@babel/core` with the following command or its `npm` equivalent: 141 | > 142 | > ```bash 143 | > yarn add --dev babel-jest babel-core@^7.0.0-bridge.0 @babel/core regenerator-runtime 144 | > ``` 145 | > 146 | > You will need to use `babel.config.js` in order to transpile `node_modules`. See https://babeljs.io/docs/en/next/config-files for more information. 147 | > 148 | > You can also see the example in the Jest repository: https://github.com/facebook/jest/tree/master/examples/babel-7 149 | 150 | _Note: Explicitly installing `regenerator-runtime` is not needed if you use `npm` 3 or 4 or Yarn_ 151 | 152 | Don't forget to add a [`.babelrc`](https://babeljs.io/docs/usage/babelrc/) file in your project's root folder. For example, if you are using ES6 and [React.js](https://facebook.github.io/react/) with the [`babel-preset-env`](https://babeljs.io/docs/plugins/preset-env/) and [`babel-preset-react`](https://babeljs.io/docs/plugins/preset-react/) presets: 153 | 154 | ```json 155 | { 156 | "presets": ["env", "react"] 157 | } 158 | ``` 159 | 160 | You are now set up to use all ES6 features and React specific syntax. 161 | 162 | > Note: If you are using a more complicated Babel configuration, using Babel's `env` option, keep in mind that Jest will automatically define `NODE_ENV` as `test`. It will not use `development` section like Babel does by default when no `NODE_ENV` is set. 163 | 164 | > Note: If you've turned off transpilation of ES6 modules with the option `{ "modules": false }`, you have to make sure to turn this on in your test environment. 165 | 166 | ```json 167 | { 168 | "presets": [["env", { "modules": false }], "react"], 169 | "env": { 170 | "test": { 171 | "presets": [["env"], "react"] 172 | } 173 | } 174 | } 175 | ``` 176 | 177 | > Note: `babel-jest` is automatically installed when installing Jest and will automatically transform files if a babel configuration exists in your project. To avoid this behavior, you can explicitly reset the `transform` configuration option: 178 | 179 | ```json 180 | // package.json 181 | { 182 | "jest": { 183 | "transform": {} 184 | } 185 | } 186 | ``` 187 | 188 | #### Using webpack 189 | 190 | Jest can be used in projects that use [webpack](https://webpack.github.io/) to manage assets, styles, and compilation. webpack does offer some unique challenges over other tools. Refer to the [webpack guide](Webpack.md) to get started. 191 | 192 | #### Using TypeScript 193 | 194 | To use TypeScript in your tests you can use [ts-jest](https://github.com/kulshekhar/ts-jest). 
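A minimal `ts-jest` setup usually boils down to two commands (a sketch based on the ts-jest docs; align the `jest`/`typescript` versions with the ones already used in `packages/backend`):

```sh
# Install Jest, ts-jest and the Jest typings
yarn add --dev jest ts-jest @types/jest

# Generate a jest.config.js preconfigured with the ts-jest preset
yarn ts-jest config:init
```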
195 | -------------------------------------------------------------------------------- /packages/backend/shared/mixins/prisma.mixin.ts: -------------------------------------------------------------------------------- 1 | import env from '@ltv/env' 2 | import { Prisma, PrismaClient } from '@prisma/client' 3 | import merge from 'lodash/merge' 4 | import { 5 | ActionSchema, 6 | Context, 7 | Errors, 8 | GenericObject, 9 | ServiceSchema, 10 | ServiceSettingSchema 11 | } from 'moleculer' 12 | 13 | export interface PrismaMixinOptions { 14 | name?: string 15 | prisma: { 16 | options?: Prisma.PrismaClientOptions 17 | model: string 18 | } 19 | graphql?: boolean 20 | permissions?: { 21 | [action: string]: string[] 22 | } 23 | createActions?: boolean 24 | } 25 | 26 | const defaultOptions: Partial = { 27 | createActions: true 28 | } 29 | 30 | export function PrismaMixin(pmo: PrismaMixinOptions): ServiceSchema { 31 | let actions: ActionSchema = {} 32 | const options = merge(defaultOptions, pmo) 33 | 34 | if (options.createActions) { 35 | actions = { 36 | create: { 37 | params: { 38 | data: { type: 'object' }, 39 | }, 40 | handler(ctx: Context) { 41 | return this.model.create(ctx.params).catch(this.handleError) 42 | }, 43 | }, 44 | updateById: { 45 | params: { 46 | data: { type: 'object' }, 47 | id: { type: 'any' }, 48 | }, 49 | handler(ctx: Context) { 50 | const { id, data } = ctx.params 51 | return this.model.update({ data, where: { id } }).catch(this.handleError) 52 | }, 53 | }, 54 | deleteById: { 55 | params: { 56 | id: { type: 'any' }, 57 | }, 58 | handler(ctx: Context<{ id: string | number }>) { 59 | const { id } = ctx.params 60 | return this.model.delete({ where: { id } }).catch(this.handleError) 61 | }, 62 | }, 63 | count: { 64 | params: { 65 | select: { type: 'object', optional: true }, 66 | include: { type: 'object', optional: true }, 67 | where: { type: 'object', optional: true }, 68 | orderBy: { type: 'object', optional: true }, 69 | cursor: { type: 'object', optional: true }, 70 | take: { type: 'number', optional: true }, 71 | skip: { type: 'number', optional: true }, 72 | distinct: { type: 'object', optional: true }, 73 | }, 74 | handler(ctx: Context) { 75 | const { select, include, where, orderBy, cursor, take, skip, distinct } = ctx.params 76 | return this.model.count({ 77 | select, 78 | include, 79 | where, 80 | orderBy, 81 | cursor, 82 | take, 83 | skip, 84 | distinct, 85 | }) 86 | }, 87 | }, 88 | find: { 89 | params: { 90 | select: { type: 'object', optional: true }, 91 | include: { type: 'object', optional: true }, 92 | where: { type: 'object', optional: true }, 93 | orderBy: { type: 'object', optional: true }, 94 | cursor: { type: 'object', optional: true }, 95 | take: { type: 'number', optional: true }, 96 | skip: { type: 'number', optional: true }, 97 | distinct: { type: 'object', optional: true }, 98 | }, 99 | cache: { 100 | enabled: true, 101 | }, 102 | async handler(ctx: Context) { 103 | const { select, include, where, orderBy, cursor, take, skip, distinct } = ctx.params 104 | return this.model 105 | .findMany({ 106 | select, 107 | include, 108 | where, 109 | orderBy, 110 | cursor, 111 | take, 112 | skip, 113 | distinct, 114 | }) 115 | .catch(this.handleError) 116 | }, 117 | }, 118 | findOne: { 119 | params: { 120 | select: { type: 'object', optional: true }, 121 | include: { type: 'object', optional: true }, 122 | where: { type: 'object', optional: true }, 123 | }, 124 | cache: { 125 | enabled: true, 126 | }, 127 | handler(ctx: Context) { 128 | const { select, include, where } 
= ctx.params 129 | return this.model.findOne({ select, include, where }) 130 | }, 131 | }, 132 | } 133 | const permissions = options.permissions || {} 134 | const actionNames = Object.keys(permissions) 135 | actionNames.forEach((action) => { 136 | actions[action].permissions = permissions[action] || [] 137 | }) 138 | } 139 | 140 | return { 141 | name: options && options.name, 142 | created() { 143 | const opts: Prisma.PrismaClientOptions = { 144 | ...(options && options.prisma && options.prisma.options), 145 | log: [ 146 | { emit: 'event', level: 'query' }, 147 | { emit: 'event', level: 'info' }, 148 | { emit: 'event', level: 'warn' }, 149 | { emit: 'event', level: 'error' }, 150 | ], 151 | errorFormat: 'minimal', 152 | } 153 | const prisma = new PrismaClient(opts) 154 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 155 | prisma.$on('query', (e: Prisma.QueryEvent) => { 156 | this.logger.info( 157 | '\n\x1b[36m -> Query: \x1b[0m', 158 | `\x1b[35m ${e.query} \x1b[0m`, 159 | '\n\x1b[36m -> Params: \x1b[0m', 160 | `\x1b[35m ${e.params} \x1b[0m`, 161 | '\n\x1b[36m -> Duration: \x1b[0m', 162 | `\x1b[35m ${e.duration} \x1b[0m`, 163 | ) 164 | }) 165 | // prisma.$on('info', (e: Prisma.LogEvent) => { 166 | // this.logger.info(e.message) 167 | // }) 168 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 169 | prisma.$on('error', (e: Prisma.LogEvent) => { 170 | this.logger.error(e.message) 171 | }) 172 | this.prisma = prisma 173 | this.settings = merge(this.settings, { database: { table: options?.prisma.model } }) 174 | }, 175 | async started() { 176 | try { 177 | await this.prisma.$connect() 178 | this.logger.debug(`Connected to database ${env('DATABASE_HOST')}:${env('DATABASE_PORT')}`) 179 | } catch (e) { 180 | throw new Errors.MoleculerServerError('Unable to connect to database.', e.message) 181 | } 182 | }, 183 | async stopped() { 184 | if (this.prisma) { 185 | this.logger.info('Closing prisma connection...') 186 | await this.prisma.$disconnect() 187 | this.prisma = null 188 | } 189 | }, 190 | methods: { 191 | handleError(error: Prisma.PrismaClientKnownRequestError) { 192 | this.logger.error(error.message) 193 | return Promise.reject( 194 | new Errors.MoleculerClientError(error.message, 500, 'DATABASE_ERROR', error.meta), 195 | ) 196 | }, 197 | }, 198 | actions, 199 | } as ServiceSchema 200 | } 201 | -------------------------------------------------------------------------------- /packages/docs/src/dev/prerequisite.md: -------------------------------------------------------------------------------- 1 | # Prerequisites 2 | 3 | ## OS 4 | 5 | - MacOS 10.12+ 6 | - Ubuntu 16.04+ 7 | - Windows is not fully supported 8 | 9 | ## Tools 10 | 11 | ### 1. brew 12 | 13 | ```bash 14 | /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" 15 | ``` 16 | 17 | References: [https://brew.sh/](https://brew.sh/) 18 | 19 | ### 2. zsh 20 | 21 | #### Install and set up zsh as default 22 | 23 | If necessary, follow these steps to install Zsh: 24 | 25 | 1. There are two main ways to install Zsh 26 | 27 | - with the package manager of your choice, _e.g._ `sudo apt-get install zsh` (see [below for more examples](#how-to-install-zsh-in-many-platforms)) 28 | - from [source](http://zsh.sourceforge.net/Arc/source.html), following 29 | [instructions from the Zsh FAQ](http://zsh.sourceforge.net/FAQ/zshfaq01.html#l7) 30 | 31 | 2. Verify installation by running `zsh --version`. Expected result: `zsh 5.1.1` or more recent. 32 | 3. 
Make it your default shell: `chsh -s $(which zsh)` 33 | 34 | - Note that this will not work if Zsh is not in your authorized shells list (`/etc/shells`) 35 | or if you don't have permission to use `chsh`. If that's the case [you'll need to use a different procedure](https://www.google.com/search?q=zsh+default+without+chsh). 36 | 37 | 4. Log out and login back again to use your new default shell. 38 | 5. Test that it worked with `echo $SHELL`. Expected result: `/bin/zsh` or similar. 39 | 6. Test with `$SHELL --version`. Expected result: 'zsh 5.1.1' or similar 40 | 41 | #### How to install zsh in many platforms 42 | 43 | ##### macOS 44 | 45 | **Try `zsh --version` before installing it from Homebrew. If it's newer than 4.3.9 46 | you _might_ be OK. Preferably newer than or equal to `5.0`.** 47 | 48 | ```sh 49 | brew install zsh zsh-completions 50 | ``` 51 | 52 | Assuming you have [Homebrew](http://brew.sh/) installed. If not, most versions of 53 | **macOS** ship zsh by default, but it's normally an older version. Alternatively, you may 54 | also use [MacPorts](https://www.macports.org/) 55 | 56 | ```sh 57 | sudo port install zsh zsh-completions 58 | ``` 59 | 60 | ##### Ubuntu, Debian & derivatives 61 | 62 | ```sh 63 | apt install zsh 64 | ``` 65 | 66 | If you don't have `apt`, the recommended package manager for end users 67 | [ [1] ](http://askubuntu.com/a/446484) 68 | [ [2] ](http://askubuntu.com/a/775264) 69 | [ [3] ](https://help.ubuntu.com/lts/serverguide/apt.html) 70 | [ [4] ](http://www.howtogeek.com/234583/simplify-command-line-package-management-with-apt-instead-of-apt-get/) 71 | , you can try `apt-get` or `aptitude`. 72 | 73 | [Other distributions that apply](https://en.wikipedia.org/wiki/List_of_Linux_distributions#Debian-based) include: 74 | Linux Mint, elementary OS, Zorin OS, Raspbian, MX Linux, Deepin. 75 | 76 | ### 3. oh-my-zsh 77 | 78 | Oh-My-Zsh is a framework for [Zsh](http://www.zsh.org), the Z shell. 79 | 80 | - In order for Oh-My-Zsh to work, Zsh must be installed. 81 | - Please run `zsh --version` to confirm. 82 | - Expected result: `zsh 5.1.1` or more recent 83 | - Additionally, Zsh should be set as your default shell. 84 | - Please run `echo $SHELL` from a new terminal to confirm. 85 | - Expected result: `usr/bin/zsh` or similar 86 | 87 | #### Basic Installation 88 | 89 | ##### via curl 90 | 91 | ```bash 92 | sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)" 93 | ``` 94 | 95 | ##### via wget 96 | 97 | ```bash 98 | sh -c "$(wget https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh -O -)" 99 | ``` 100 | 101 | References: [https://github.com/robbyrussell/oh-my-zsh](https://github.com/robbyrussell/oh-my-zsh) 102 | 103 | ### 4. 
NodeJS 10 LTS
104 |
105 | #### macOS
106 |
107 | Update `brew` before installing
108 |
109 | ```bash
110 | brew update
111 | ```
112 |
113 | Install NodeJS v10
114 |
115 | ```bash
116 | brew install node@10
117 | ```
118 |
119 | Check version
120 |
121 | ```bash
122 | node -v
123 | ```
124 |
125 | The result should be similar to
126 |
127 | ```
128 | v10.13.0
129 | ```
130 |
131 | #### Linux (Ubuntu)
132 |
133 | First, make sure you have curl installed:
134 |
135 | ```bash
136 | sudo apt install curl
137 | ```
138 |
139 | Then download and execute the Node.js 10.x installer:
140 |
141 | ```bash
142 | curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash -
143 | ```
144 |
145 | This shouldn’t take too long. It adds a source file for the official Node.js 10.x repo, grabs the signing key, and runs `apt update`.
146 |
147 | Note: if you have previously used the installer script for an older version of Node.js, running this one will overwrite those changes.
148 |
149 | Once the installer has finished, install (or upgrade) Node.js:
150 |
151 | ```bash
152 | sudo apt install nodejs
153 | ```
154 |
155 | That’s it: you’re all set with the latest Node.js 10.x release on Ubuntu 18.04 LTS.
156 |
157 | ### 5. VSCode
158 |
159 | #### Download
160 |
161 | Download from here [https://code.visualstudio.com/](https://code.visualstudio.com/)
162 |
163 | #### Extensions
164 |
165 | 1. prettier
166 | 2. tslint
167 | 3.
168 |
169 | #### User Configurations
170 |
171 | - Please install the following extensions in `VSCode` (_optional_)
172 |
173 | ```sh
174 | # Code convention
175 | code --install-extension dbaeumer.vscode-eslint
176 | code --install-extension eg2.tslint
177 | code --install-extension esbenp.prettier-vscode
178 | code --install-extension codezombiech.gitignore
179 | code --install-extension eamodio.gitlens
180 | code --install-extension aaron-bond.better-comments
181 | code --install-extension formulahendry.auto-close-tag
182 | code --install-extension formulahendry.auto-rename-tag
183 | code --install-extension DavidAnson.vscode-markdownlint
184 | code --install-extension joelday.docthis
185 | code --install-extension robinbentley.sass-indented
186 | code --install-extension mikestead.dotenv
187 | code --install-extension octref.vetur
188 | code --install-extension CoenraadS.bracket-pair-colorizer
189 |
190 | # Tools
191 | code --install-extension ChakrounAnas.turbo-console-log
192 | code --install-extension christian-kohler.npm-intellisense
193 | code --install-extension christian-kohler.path-intellisense
194 | code --install-extension DavidAnson.vscode-markdownlint
195 | code --install-extension kumar-harsh.graphql-for-vscode
196 | code --install-extension msjsdiag.debugger-for-chrome
197 | ```
198 |
199 | - User settings
200 |
201 | Steps:
202 |
203 | - Open the user settings JSON file
204 | - Copy and paste the following config
205 |
206 | ```json
207 | {
208 | // Editor Tweak
209 | "files.eol": "\n",
210 | "editor.tabSize": 2,
211 | "editor.insertSpaces": true,
212 | "editor.formatOnSave": true,
213 | "editor.renderWhitespace": "none",
214 |
215 | // Eslint
216 | "eslint.autoFixOnSave": true,
217 |
218 | // TS lint
219 | "tslint.autoFixOnSave": true,
220 |
221 | // Prettier
222 | "prettier.printWidth": 120,
223 | "prettier.singleQuote": true,
224 | "prettier.trailingComma": "all",
225 | "prettier.eslintIntegration": true,
226 |
227 | // Git Lens
228 | "gitlens.advanced.messages": {
229 | "suppressShowKeyBindingsNotice": true
230 | },
231 | "gitlens.hovers.enabled": false,
232 | "gitlens.codeLens.enabled": false,
233 | "gitlens.statusBar.enabled": false,
234 | "gitlens.currentLine.enabled": false,
235 | "gitlens.blame.heatmap.enabled": false,
236 | "gitlens.mode.statusBar.enabled": false,
237 | "gitlens.blame.highlight.enabled": false,
238 | "gitlens.hovers.currentLine.over": "line",
239 |
240 | // Vetur
241 | "vetur.format.defaultFormatter.ts": "prettier",
242 | "vetur.format.defaultFormatter.js": "prettier",
243 | "vetur.format.defaultFormatter.css": "prettier",
244 | "vetur.format.defaultFormatter.less": "prettier",
245 | "vetur.format.defaultFormatter.scss": "prettier",
246 | "vetur.format.defaultFormatter.postcss": "prettier",
247 | "vetur.format.defaultFormatter.stylus": "stylus-supremacy"
248 | }
249 | ```
250 |
251 | #### Additional settings
252 |
253 | ```json
254 | {
255 | // Bash shell
256 | "terminal.integrated.shell.windows": "C:\\Windows\\System32\\bash.exe",
257 | "terminal.integrated.shellArgs.windows": ["-c", "zsh"],
258 | "git.path": "/usr/bin/git"
259 | }
260 | ```
261 |
262 | ### 6. Hyper Terminal
263 |
264 | ```json
265 | {
266 | // Bash shell
267 | "shell": "C:\\Windows\\System32\\bash.exe",
268 |
269 | // for setting shell arguments (i.e. for using interactive shellArgs: `['-i']`)
270 | // by default `['--login']` will be used
271 | "shellArgs": ["-c", "zsh && cd ~"]
272 | }
273 | ```
274 |
275 | ### 7. Docker
276 |
277 | ![link-docker-into-sub-ubuntu](/docs/images/link-docker-into-sub-ubuntu.png)
278 |
279 | ## Local Environments
280 |
281 | 1. Make sure `NODE_ENV` is set to `development`
282 | Check: `echo $NODE_ENV`. It should print `development`.
283 | If not, set `NODE_ENV` as shown below:
284 |
285 | ```bash
286 | echo "NODE_ENV=development" >> ~/.zshrc
287 | ```
288 |
289 | Then source it:
290 |
291 | ```bash
292 | source ~/.zshrc
293 | ```
294 |
295 | 2. Others
296 |
--------------------------------------------------------------------------------
/packages/backend/shared/mixins/graphql.mixin.ts:
--------------------------------------------------------------------------------
1 | import { Prisma, PrismaClient } from '.prisma/client'
2 | import env from '@ltv/env'
3 | import isArray from 'lodash/isArray'
4 | import isFunction from 'lodash/isFunction'
5 | import isString from 'lodash/isString'
6 | import merge from 'lodash/merge'
7 | import omit from 'lodash/omit'
8 | import omitBy from 'lodash/omitBy'
9 | import pick from 'lodash/pick'
10 | import {
11 | ActionSchema, Context, Errors, GenericObject,
12 | ServiceSchema,
13 | ServiceSettingSchema
14 | } from 'moleculer'
15 | import { camelCase, pluralize, upperFirst } from 'shared/utils/graphql'
16 | import { customPrismaLog } from 'shared/utils/prisma'
17 |
18 |
19 | export type PrimaryKeyType = 'String' | 'Int'
20 | export type PrimaryType = {
21 | [key: string]: PrimaryKeyType
22 | }
23 |
24 | export type GraphQLMixinActionOptions = {
25 | disabled?: boolean
26 | permissions?: []
27 | }
28 |
29 | export interface GraphqlMixinTableOptions {
30 | name: string
31 | primary: PrimaryType
32 | }
33 |
34 | export interface GraphqlMixinOptions {
35 | name?: string
36 | table: GraphqlMixinTableOptions
37 | datasource?: {
38 | service: string
39 | }
40 | prisma?: Prisma.PrismaClientOptions
41 | actions?: {
42 | [key: string]: GraphQLMixinActionOptions
43 | }
44 | }
45 |
46 | export interface GraphQLCreateInput<T> {
47 | clientMutationId?: string
48 | input: {
49 | [key: string]: T
50 | }
51 | }
52 |
53 | export interface GraphQLUpdateInputById {
54 | [key: string]: string
55 | }
56 |
57 | export interface GraphQLUpdateInput<T> {
58 | clientMutationId?: string
59 | input: {
60 | [key: string]: T
61 | }
62 | }
63 |
64 | export interface PrismaCreateInput<T> {
65 | data: T
66 | }
67 |
68 | function allQueryName(entityName: string) {
69 | return `all${pluralize(entityName)}`
70 | }
71 |
72 | function allQueryParams(entityName: string) {
73 | return `(
74 | # Only read the first 'n' values of the set.
75 | first: Int
76 | # Only read the last 'n' values of the set.
77 | last: Int
78 | # Skip the first 'n' values from our 'after' cursor, an alternative to cursor
79 | # based pagination. May not be used with 'last'.
80 | offset: Int
81 | # Read all values in the set before (above) this cursor.
82 | before: Cursor
83 | # Read all values in the set after (below) this cursor.
84 | after: Cursor
85 | # The method to use when ordering '${pluralize(entityName)}'.
86 | orderBy: [${pluralize(entityName)}OrderBy!] = [PRIMARY_KEY_ASC]
87 | # A condition to be used in determining which values should be returned by the collection.
88 | condition: ${entityName}Condition
89 | # A filter to be used in determining which values should be returned by the collection.
90 | filter: ${entityName}Filter 91 | )` 92 | } 93 | 94 | function singleQueryName(entityName: string) { 95 | return `${entityName}`.toLowerCase() 96 | } 97 | 98 | function combineKeys(keys: string[]) { 99 | return keys.map((k) => upperFirst(k)).join('And') 100 | } 101 | 102 | function byPrimaryQueryName(entityName: string, keys: string[]) { 103 | return `${singleQueryName(entityName)}By${combineKeys(keys)}` 104 | } 105 | 106 | function byPrimaryQueryParams(primary: PrimaryType) { 107 | const keys = Object.keys(primary) 108 | const params = keys.map((k) => `${k}: ${primary[k]}!`) 109 | return '(' + params.join(', ') + ')' 110 | } 111 | 112 | interface PrismaQueryParams { 113 | take: number 114 | skip: number 115 | where: GenericObject 116 | } 117 | 118 | interface PostgraphileQueryParams { 119 | first: number 120 | last: number 121 | offset: number 122 | before: any 123 | after: any 124 | orderBy: string[] | GenericObject 125 | condition: any 126 | filter: any 127 | } 128 | 129 | function transformOrderBy(orderBy: string[]) { 130 | type OrderBy = { 131 | [key: string]: 'asc' | 'desc' 132 | } 133 | return orderBy.reduce((order: OrderBy, ob) => { 134 | const segments = ob.split('_') 135 | const sort = segments[segments.length - 1] 136 | const field = camelCase(ob.replace(`_${sort}`, '')) 137 | return { ...order, [field]: sort.toLocaleLowerCase() } 138 | }, {}) 139 | } 140 | 141 | const filterMap: GenericObject = { 142 | equalTo: 'equals', 143 | equalToInsensitive: 'equals', 144 | notEqualTo: 'not', 145 | notEqualToInsensitive: 'not', 146 | in: 'in', 147 | inInsensitive: 'in', 148 | notIn: 'notIn', 149 | notInInsensitive: 'notIn', 150 | lessThan: 'lt', 151 | lessThanInsensitive: 'lt', 152 | lessThanOrEqualTo: 'lte', 153 | lessThanOrEqualToInsensitive: 'lte', 154 | greaterThan: 'gt', 155 | greaterThanInsensitive: 'gt', 156 | greaterThanOrEqualTo: 'gte', 157 | greaterThanOrEqualToInsensitive: 'gte', 158 | includes: 'contains', 159 | includesInsensitive: 'contains', 160 | startsWith: 'startsWith', 161 | startsWithInsensitive: 'startsWith', 162 | endsWith: 'endsWith', 163 | endsWithInsensitive: 'endsWith', 164 | } 165 | 166 | function hasTotalCount(ctx: Context) { 167 | const query: string = (ctx?.options?.parentCtx?.params as any)?.req?.body?.query || '' 168 | return query.indexOf('totalCount') > 0 169 | } 170 | 171 | type TransformParamsType = PostgraphileQueryParams & PrismaQueryParams 172 | 173 | function transformFilter(filter: GenericObject): GenericObject { 174 | if (!filter) return {} 175 | const keys = Object.keys(filter) 176 | return keys.reduce((carry, key) => { 177 | if (['not'].indexOf(key) !== -1) { 178 | return { ...carry, [key.toUpperCase()]: transformFilter(filter[key]) } 179 | } 180 | if (['and', 'or'].indexOf(key) !== -1) { 181 | const arr: GenericObject[] = filter[key] 182 | return { ...carry, [key.toUpperCase()]: arr.map((item) => transformFilter(item)) } 183 | } 184 | const conditions = filter[key] 185 | const condKeys = Object.keys(conditions) 186 | return { 187 | ...carry, 188 | ...condKeys.reduce( 189 | (conCarry, k) => ({ ...conCarry, [key]: { [filterMap[k]]: conditions[k] } }), 190 | {}, 191 | ), 192 | } 193 | }, {}) 194 | } 195 | 196 | function transformParams(ctx: Context) { 197 | // before, after: todo 198 | const { first, last, offset, orderBy, filter, condition } = ctx.params 199 | // BELOW params should be transformed 200 | // include: { type: 'object', optional: true }, 201 | // where: { type: 'object', optional: true }, 202 | // cursor: { type: 'object', 
optional: true },
203 | // distinct: { type: 'object', optional: true }
204 | ctx.params.where = transformFilter(filter)
205 | if (condition) {
206 | const keys = Object.keys(condition)
207 | ctx.params.where = {
208 | ...ctx.params.where,
209 | ...keys.reduce((carry, k) => ({ ...carry, [k]: { equals: condition[k] } }), {}),
210 | }
211 | }
212 |
213 | if (orderBy) {
214 | ctx.params.orderBy = transformOrderBy((orderBy as string[]) || [])
215 | // TODO: Find good way to add primaryKey as default
216 | delete ctx.params.orderBy.primaryKey
217 | }
218 |
219 | ctx.params.take = last ? -last : first
220 | ctx.params.skip = offset
221 |
222 | // TODO: Will look into cursor
223 | ctx.params = pick(ctx.params, [
224 | 'where',
225 | 'orderBy',
226 | 'take',
227 | 'first',
228 | 'skip',
229 | 'cursor',
230 | ]) as TransformParamsType
231 | }
232 |
233 | function transformUpdateData(data: GenericObject) {
234 | const keys = Object.keys(data)
235 | return keys.reduce((carry, k) => ({ ...carry, [k]: { set: data[k] } }), {})
236 | }
237 |
238 | async function transformPayload<DataType>(
239 | responseKey: string | null,
240 | data: DataType | DataType[],
241 | options?: GraphQLTransformPayloadOptions,
242 | ): Promise<GraphQLPayload<DataType> | GraphQLPayload> {
243 | if (!data) {
244 | return null
245 | }
246 |
247 | if (isArray(data)) {
248 | const payload: GraphQLPayload = {
249 | nodes: data,
250 | }
251 |
252 | if (isFunction(options?.totalCount)) {
253 | payload.totalCount = await options.totalCount()
254 | }
255 |
256 | return payload
257 | }
258 |
259 | if (!isString(responseKey)) {
260 | throw new Error('Require response key when return an object. Ex: { user: userData }')
261 | }
262 |
263 | return { [responseKey]: data }
264 | }
265 |
266 | type CreateActionGraphQLType = {
267 | query?: string
268 | mutation?: string
269 | }
270 |
271 | type CreateActionOptions = ActionSchema & {
272 | graphql: CreateActionGraphQLType
273 | options?: GraphQLMixinActionOptions
274 | }
275 |
276 | function createActions({ name, graphql, handler, options }: CreateActionOptions): ActionSchema {
277 | if (options?.disabled) {
278 | return
279 | }
280 | return {
281 | name,
282 | graphql,
283 | handler,
284 | permissions: options?.permissions,
285 | }
286 | }
287 |
288 | const defaultActionsOptions: GraphQLMixinActionOptions = {
289 | disabled: false,
290 | }
291 |
292 | function extendDefaultActionOptions(options: { [key: string]: GraphQLMixinActionOptions }): {
293 | [key: string]: GraphQLMixinActionOptions
294 | } {
295 | options = options || {}
296 | const keys = Object.keys(options)
297 | return keys.reduce(
298 | (carry, k) => ({ ...carry, [k]: { ...defaultActionsOptions, ...options[k] } }),
299 | {},
300 | )
301 | }
302 |
303 | export function GraphQLMixin(options: GraphqlMixinOptions): ServiceSchema {
304 | options = { table: { name: '', primary: { id: 'Int' } }, ...(options || {}) }
305 | const dsServiceName = `tbl-${options.table.name.toLowerCase()}`
306 | if (!options.datasource) {
307 | options.datasource = { service: dsServiceName }
308 | }
309 | options.actions = extendDefaultActionOptions(options.actions)
310 |
311 | // QUERY
312 | const allQueryActName: string = allQueryName(options.table.name)
313 | const actAll: ActionSchema = createActions({
314 | name: allQueryActName,
315 | graphql: {
316 | query:
317 | allQueryActName +
318 | allQueryParams(options.table.name) +
319 | `: ${pluralize(options.table.name)}Connection`,
320 | },
321 | async handler(ctx: Context) {
322 | this.logger.debug(`♻ [GRAPHQL]: ${allQueryActName} > params: `, ctx.params)
323 | // const data = await ctx.call(`${options.datasource.service}.find`, ctx.params)
324 | const data = await this.model.findMany(ctx.params)
325 | if (!isFunction(this.transformPayload)) {
326 | return data
327 | }
328 | const tranOpts: GraphQLTransformPayloadOptions = {}
329 | if (hasTotalCount(ctx)) {
330 | tranOpts.totalCount = () => this.model.count({ where: ctx.params.where })
331 | }
332 |
333 | return this.transformPayload(null, data, tranOpts)
334 | },
335 | options: options.actions[allQueryActName],
336 | })
337 |
338 | const byKeyQueryActName: string = byPrimaryQueryName(
339 | options.table.name,
340 | Object.keys(options.table.primary),
341 | )
342 | const actByKey: ActionSchema = createActions({
343 | name: byKeyQueryActName,
344 | graphql: {
345 | query:
346 | byKeyQueryActName + byPrimaryQueryParams(options.table.primary) + `: ${options.table.name}`,
347 | },
348 | cache: {
349 | enabled: true,
350 | },
351 | handler(ctx: Context) {
352 | this.logger.debug(`♻ [GRAPHQL]: ${byKeyQueryActName} > params: `, ctx.params)
353 | return this.model.findUnique({ where: ctx.params })
354 | },
355 | options: options.actions[byKeyQueryActName],
356 | })
357 |
358 | const singleQueryActName: string = singleQueryName(options.table.name)
359 | const actSingle: ActionSchema = createActions({
360 | name: singleQueryActName,
361 | graphql: {
362 | query:
363 | singleQueryActName + `(condition: ${options.table.name}Condition!): ${options.table.name}`,
364 | },
365 | params: {
366 | condition: {
367 | type: 'object',
368 | optional: true,
369 | },
370 | },
371 | cache: {
372 | enabled: true,
373 | },
374 | handler(ctx: Context) {
375 | this.logger.debug(`♻ [GRAPHQL]: ${singleQueryActName} > params: `, ctx.params)
376 | return this.model.findUnique({ where: ctx.params.condition })
377 | },
378 | options: options.actions[singleQueryActName],
379 | })
380 |
381 | // MUTATIONS
382 | const actCreateName = `create${options.table.name}`
383 | const actCreate: ActionSchema = createActions({
384 | name: actCreateName,
385 | graphql: {
386 | // ex: createUser(input: CreateUserInput!): CreateUserPayload
387 | mutation: `${actCreateName}(input: Create${options.table.name}Input!): Create${options.table.name}Payload`,
388 | },
389 | async handler(ctx: Context<GraphQLCreateInput<GenericObject>>) {
390 | this.logger.debug(`♻ [GRAPHQL]: ${actCreateName} > params: `, ctx.params)
391 | return this.model.create({
392 | data: ctx.params.input[camelCase(options.table.name)],
393 | })
394 | },
395 | options: options.actions[actCreateName],
396 | })
397 |
398 | const updateBy = combineKeys(Object.keys(options.table.primary))
399 | const actUpdateByIdName = `update${options.table.name}By${updateBy}`
400 | const actUpdateById: ActionSchema = createActions({
401 | name: actUpdateByIdName,
402 | graphql: {
403 | // eg: updateUser(input: UpdateUserInput!): UpdateUserPayload
404 | mutation: `${actUpdateByIdName}(input: Update${options.table.name}By${updateBy}Input!): Update${options.table.name}Payload`,
405 | },
406 | async handler(ctx: Context<GraphQLUpdateInput<GenericObject> & GraphQLUpdateInputById>) {
407 | this.logger.debug(`♻ [GRAPHQL]: ${actUpdateByIdName} > params: `, ctx.params)
408 | const updatePatch = ctx.params.input[camelCase(options.table.name) + 'Patch']
409 |
410 | return this.model.update({
411 | where: { id: ctx.params.input['id'] },
412 | data: this.transformUpdateData(omit(updatePatch, 'id')),
413 | })
414 | },
415 | options: options.actions[actUpdateByIdName],
416 | })
417 |
418 | const actDeleteByIdName = `delete${options.table.name}ById`
419 | const
actDeleteById: ActionSchema = createActions({ 420 | name: actDeleteByIdName, 421 | graphql: { 422 | // eg: deleteUser(input: DeleteUserInput!): DeleteUserPayload 423 | mutation: `${actDeleteByIdName}(input: Delete${options.table.name}ByIdInput!): Delete${options.table.name}Payload`, 424 | }, 425 | async handler(ctx: Context<{ input: { id: string | number } }>) { 426 | this.logger.debug(`♻ [GRAPHQL]: ${actDeleteByIdName} > params: `, ctx.params) 427 | const deleted = await this.model.delete({ 428 | where: { id: ctx.params.input['id'] }, 429 | }) 430 | 431 | if (!isFunction(this.transformPayload)) { 432 | return deleted 433 | } 434 | 435 | return { 436 | ...this.transformPayload(camelCase(options.table.name), deleted), 437 | [`deleted${options.table.name}Id`]: ctx.params.input['id'], 438 | } 439 | }, 440 | options: options.actions[actDeleteByIdName], 441 | }) 442 | 443 | const actions = omitBy( 444 | { 445 | // Query 446 | actAll, 447 | actByKey, 448 | actSingle, 449 | // Mutations 450 | actCreate, 451 | actUpdateById, 452 | actDeleteById, 453 | }, 454 | (act) => !act, 455 | ) 456 | 457 | const hooks = { 458 | before: { 459 | [allQueryActName]: transformParams, 460 | }, 461 | after: { 462 | [actCreateName]: async (_ctx: Context, res: any) => { 463 | return transformPayload(camelCase(options.table.name), res) 464 | }, 465 | [actUpdateByIdName]: (_ctx: Context, res: any) => { 466 | return transformPayload(camelCase(options.table.name), res) 467 | }, 468 | }, 469 | } 470 | 471 | return { 472 | name: options.name, 473 | settings: {}, 474 | hooks, 475 | actions, 476 | methods: { 477 | transformParams, 478 | transformUpdateData, 479 | transformPayload, 480 | handleError(error: Prisma.PrismaClientKnownRequestError) { 481 | this.logger.error(error.message) 482 | return Promise.reject( 483 | new Errors.MoleculerClientError(error.message, 500, 'DATABASE_ERROR', error.meta), 484 | ) 485 | }, 486 | }, 487 | created() { 488 | const opts: Prisma.PrismaClientOptions = { 489 | ...(options && options.prisma), 490 | log: [ 491 | { emit: 'event', level: 'query' }, 492 | { emit: 'event', level: 'info' }, 493 | { emit: 'event', level: 'warn' }, 494 | { emit: 'event', level: 'error' }, 495 | ], 496 | errorFormat: 'minimal', 497 | } 498 | const prisma = new PrismaClient(opts) 499 | this.prisma = customPrismaLog(prisma, this.logger) 500 | this.settings = merge(this.settings, { database: { table: options?.table.name } }) 501 | }, 502 | async started() { 503 | try { 504 | await this.prisma.$connect() 505 | this.logger.debug(`Connected to database ${env('DATABASE_HOST')}:${env('DATABASE_PORT')}`) 506 | } catch (e) { 507 | throw new Errors.MoleculerServerError(`Unable to connect to database: ${env('DATABASE_HOST')}:${env('DATABASE_PORT')}`, e.message) 508 | } 509 | }, 510 | async stopped() { 511 | if (this.prisma) { 512 | this.logger.info('Closing prisma connection...') 513 | await this.prisma.$disconnect() 514 | this.prisma = null 515 | } 516 | }, 517 | } 518 | } 519 | -------------------------------------------------------------------------------- /packages/backend/graphql/schema-dev.graphql: -------------------------------------------------------------------------------- 1 | # An object with a globally unique `ID`. 2 | interface Node { 3 | # A globally unique identifier. Can be used in various places throughout the system to identify this single value. 4 | nodeId: ID! 5 | } 6 | 7 | # A connection to a list of `Config` values. 8 | type ConfigsConnection { 9 | # A list of `Config` objects. 10 | nodes: [Config]! 
11 | 12 | # A list of edges which contains the `Config` and cursor to aid in pagination. 13 | edges: [ConfigsEdge!]! 14 | 15 | # Information to aid in pagination. 16 | pageInfo: PageInfo! 17 | 18 | # The count of *all* `Config` you could get from the connection. 19 | totalCount: Int! 20 | } 21 | 22 | type Config implements Node { 23 | # A globally unique identifier. Can be used in various places throughout the system to identify this single value. 24 | nodeId: ID! 25 | key: String! 26 | value: String! 27 | isDefault: Boolean! 28 | createdAt: Datetime 29 | updatedAt: Datetime 30 | } 31 | 32 | # A point in time as described by the [ISO 33 | # 8601](https://en.wikipedia.org/wiki/ISO_8601) standard. May or may not include a timezone. 34 | scalar Datetime 35 | 36 | # A `Config` edge in the connection. 37 | type ConfigsEdge { 38 | # A cursor for use in pagination. 39 | cursor: Cursor 40 | 41 | # The `Config` at the end of the edge. 42 | node: Config 43 | } 44 | 45 | # A location in a connection that can be used for resuming pagination. 46 | scalar Cursor 47 | 48 | # Information about pagination in a connection. 49 | type PageInfo { 50 | # When paginating forwards, are there more items? 51 | hasNextPage: Boolean! 52 | 53 | # When paginating backwards, are there more items? 54 | hasPreviousPage: Boolean! 55 | 56 | # When paginating backwards, the cursor to continue. 57 | startCursor: Cursor 58 | 59 | # When paginating forwards, the cursor to continue. 60 | endCursor: Cursor 61 | } 62 | 63 | # Methods to use when ordering `Config`. 64 | enum ConfigsOrderBy { 65 | NATURAL 66 | KEY_ASC 67 | KEY_DESC 68 | VALUE_ASC 69 | VALUE_DESC 70 | IS_DEFAULT_ASC 71 | IS_DEFAULT_DESC 72 | CREATED_AT_ASC 73 | CREATED_AT_DESC 74 | UPDATED_AT_ASC 75 | UPDATED_AT_DESC 76 | PRIMARY_KEY_ASC 77 | PRIMARY_KEY_DESC 78 | } 79 | 80 | # A condition to be used against `Config` object types. All fields are tested for equality and combined with a logical ‘and.’ 81 | input ConfigCondition { 82 | # Checks for equality with the object’s `key` field. 83 | key: String 84 | 85 | # Checks for equality with the object’s `value` field. 86 | value: String 87 | 88 | # Checks for equality with the object’s `isDefault` field. 89 | isDefault: Boolean 90 | 91 | # Checks for equality with the object’s `createdAt` field. 92 | createdAt: Datetime 93 | 94 | # Checks for equality with the object’s `updatedAt` field. 95 | updatedAt: Datetime 96 | } 97 | 98 | # A filter to be used against `Config` object types. All fields are combined with a logical ‘and.’ 99 | input ConfigFilter { 100 | # Filter by the object’s `key` field. 101 | key: StringFilter 102 | 103 | # Filter by the object’s `value` field. 104 | value: StringFilter 105 | 106 | # Filter by the object’s `isDefault` field. 107 | isDefault: BooleanFilter 108 | 109 | # Filter by the object’s `createdAt` field. 110 | createdAt: DatetimeFilter 111 | 112 | # Filter by the object’s `updatedAt` field. 113 | updatedAt: DatetimeFilter 114 | 115 | # Checks for all expressions in this list. 116 | and: [ConfigFilter!] 117 | 118 | # Checks for any expressions in this list. 119 | or: [ConfigFilter!] 120 | 121 | # Negates the expression. 122 | not: ConfigFilter 123 | } 124 | 125 | # A filter to be used against String fields. All fields are combined with a logical ‘and.’ 126 | input StringFilter { 127 | # Is null (if `true` is specified) or is not null (if `false` is specified). 128 | isNull: Boolean 129 | 130 | # Equal to the specified value. 
131 | equalTo: String 132 | 133 | # Not equal to the specified value. 134 | notEqualTo: String 135 | 136 | # Not equal to the specified value, treating null like an ordinary value. 137 | distinctFrom: String 138 | 139 | # Equal to the specified value, treating null like an ordinary value. 140 | notDistinctFrom: String 141 | 142 | # Included in the specified list. 143 | in: [String!] 144 | 145 | # Not included in the specified list. 146 | notIn: [String!] 147 | 148 | # Less than the specified value. 149 | lessThan: String 150 | 151 | # Less than or equal to the specified value. 152 | lessThanOrEqualTo: String 153 | 154 | # Greater than the specified value. 155 | greaterThan: String 156 | 157 | # Greater than or equal to the specified value. 158 | greaterThanOrEqualTo: String 159 | 160 | # Contains the specified string (case-sensitive). 161 | includes: String 162 | 163 | # Does not contain the specified string (case-sensitive). 164 | notIncludes: String 165 | 166 | # Contains the specified string (case-insensitive). 167 | includesInsensitive: String 168 | 169 | # Does not contain the specified string (case-insensitive). 170 | notIncludesInsensitive: String 171 | 172 | # Starts with the specified string (case-sensitive). 173 | startsWith: String 174 | 175 | # Does not start with the specified string (case-sensitive). 176 | notStartsWith: String 177 | 178 | # Starts with the specified string (case-insensitive). 179 | startsWithInsensitive: String 180 | 181 | # Does not start with the specified string (case-insensitive). 182 | notStartsWithInsensitive: String 183 | 184 | # Ends with the specified string (case-sensitive). 185 | endsWith: String 186 | 187 | # Does not end with the specified string (case-sensitive). 188 | notEndsWith: String 189 | 190 | # Ends with the specified string (case-insensitive). 191 | endsWithInsensitive: String 192 | 193 | # Does not end with the specified string (case-insensitive). 194 | notEndsWithInsensitive: String 195 | 196 | # Matches the specified pattern (case-sensitive). An underscore (_) matches any single character; a percent sign (%) matches any sequence of zero or more characters. 197 | like: String 198 | 199 | # Does not match the specified pattern (case-sensitive). An underscore (_) matches any single character; a percent sign (%) matches any sequence of zero or more characters. 200 | notLike: String 201 | 202 | # Matches the specified pattern (case-insensitive). An underscore (_) matches any single character; a percent sign (%) matches any sequence of zero or more characters. 203 | likeInsensitive: String 204 | 205 | # Does not match the specified pattern (case-insensitive). An underscore (_) matches any single character; a percent sign (%) matches any sequence of zero or more characters. 206 | notLikeInsensitive: String 207 | 208 | # Equal to the specified value (case-insensitive). 209 | equalToInsensitive: String 210 | 211 | # Not equal to the specified value (case-insensitive). 212 | notEqualToInsensitive: String 213 | 214 | # Not equal to the specified value, treating null like an ordinary value (case-insensitive). 215 | distinctFromInsensitive: String 216 | 217 | # Equal to the specified value, treating null like an ordinary value (case-insensitive). 218 | notDistinctFromInsensitive: String 219 | 220 | # Included in the specified list (case-insensitive). 221 | inInsensitive: [String!] 222 | 223 | # Not included in the specified list (case-insensitive). 224 | notInInsensitive: [String!] 225 | 226 | # Less than the specified value (case-insensitive). 
227 | lessThanInsensitive: String 228 | 229 | # Less than or equal to the specified value (case-insensitive). 230 | lessThanOrEqualToInsensitive: String 231 | 232 | # Greater than the specified value (case-insensitive). 233 | greaterThanInsensitive: String 234 | 235 | # Greater than or equal to the specified value (case-insensitive). 236 | greaterThanOrEqualToInsensitive: String 237 | } 238 | 239 | # A filter to be used against Boolean fields. All fields are combined with a logical ‘and.’ 240 | input BooleanFilter { 241 | # Is null (if `true` is specified) or is not null (if `false` is specified). 242 | isNull: Boolean 243 | 244 | # Equal to the specified value. 245 | equalTo: Boolean 246 | 247 | # Not equal to the specified value. 248 | notEqualTo: Boolean 249 | 250 | # Not equal to the specified value, treating null like an ordinary value. 251 | distinctFrom: Boolean 252 | 253 | # Equal to the specified value, treating null like an ordinary value. 254 | notDistinctFrom: Boolean 255 | 256 | # Included in the specified list. 257 | in: [Boolean!] 258 | 259 | # Not included in the specified list. 260 | notIn: [Boolean!] 261 | 262 | # Less than the specified value. 263 | lessThan: Boolean 264 | 265 | # Less than or equal to the specified value. 266 | lessThanOrEqualTo: Boolean 267 | 268 | # Greater than the specified value. 269 | greaterThan: Boolean 270 | 271 | # Greater than or equal to the specified value. 272 | greaterThanOrEqualTo: Boolean 273 | } 274 | 275 | # A filter to be used against Datetime fields. All fields are combined with a logical ‘and.’ 276 | input DatetimeFilter { 277 | # Is null (if `true` is specified) or is not null (if `false` is specified). 278 | isNull: Boolean 279 | 280 | # Equal to the specified value. 281 | equalTo: Datetime 282 | 283 | # Not equal to the specified value. 284 | notEqualTo: Datetime 285 | 286 | # Not equal to the specified value, treating null like an ordinary value. 287 | distinctFrom: Datetime 288 | 289 | # Equal to the specified value, treating null like an ordinary value. 290 | notDistinctFrom: Datetime 291 | 292 | # Included in the specified list. 293 | in: [Datetime!] 294 | 295 | # Not included in the specified list. 296 | notIn: [Datetime!] 297 | 298 | # Less than the specified value. 299 | lessThan: Datetime 300 | 301 | # Less than or equal to the specified value. 302 | lessThanOrEqualTo: Datetime 303 | 304 | # Greater than the specified value. 305 | greaterThan: Datetime 306 | 307 | # Greater than or equal to the specified value. 308 | greaterThanOrEqualTo: Datetime 309 | } 310 | 311 | # A connection to a list of `Profile` values. 312 | type ProfilesConnection { 313 | # A list of `Profile` objects. 314 | nodes: [Profile]! 315 | 316 | # A list of edges which contains the `Profile` and cursor to aid in pagination. 317 | edges: [ProfilesEdge!]! 318 | 319 | # Information to aid in pagination. 320 | pageInfo: PageInfo! 321 | 322 | # The count of *all* `Profile` you could get from the connection. 323 | totalCount: Int! 324 | } 325 | 326 | type Profile implements Node { 327 | # A globally unique identifier. Can be used in various places throughout the system to identify this single value. 328 | nodeId: ID! 329 | id: Int! 330 | userId: String! 331 | email: String 332 | displayName: String 333 | photoUrl: String 334 | address: String 335 | } 336 | 337 | # A `Profile` edge in the connection. 338 | type ProfilesEdge { 339 | # A cursor for use in pagination. 340 | cursor: Cursor 341 | 342 | # The `Profile` at the end of the edge. 
343 | node: Profile 344 | } 345 | 346 | # Methods to use when ordering `Profile`. 347 | enum ProfilesOrderBy { 348 | NATURAL 349 | ID_ASC 350 | ID_DESC 351 | USER_ID_ASC 352 | USER_ID_DESC 353 | EMAIL_ASC 354 | EMAIL_DESC 355 | DISPLAY_NAME_ASC 356 | DISPLAY_NAME_DESC 357 | PHOTO_URL_ASC 358 | PHOTO_URL_DESC 359 | ADDRESS_ASC 360 | ADDRESS_DESC 361 | PRIMARY_KEY_ASC 362 | PRIMARY_KEY_DESC 363 | } 364 | 365 | # A condition to be used against `Profile` object types. All fields are tested for equality and combined with a logical ‘and.’ 366 | input ProfileCondition { 367 | # Checks for equality with the object’s `id` field. 368 | id: Int 369 | 370 | # Checks for equality with the object’s `userId` field. 371 | userId: String 372 | 373 | # Checks for equality with the object’s `email` field. 374 | email: String 375 | 376 | # Checks for equality with the object’s `displayName` field. 377 | displayName: String 378 | 379 | # Checks for equality with the object’s `photoUrl` field. 380 | photoUrl: String 381 | 382 | # Checks for equality with the object’s `address` field. 383 | address: String 384 | } 385 | 386 | # A filter to be used against `Profile` object types. All fields are combined with a logical ‘and.’ 387 | input ProfileFilter { 388 | # Filter by the object’s `id` field. 389 | id: IntFilter 390 | 391 | # Filter by the object’s `userId` field. 392 | userId: StringFilter 393 | 394 | # Filter by the object’s `email` field. 395 | email: StringFilter 396 | 397 | # Filter by the object’s `displayName` field. 398 | displayName: StringFilter 399 | 400 | # Filter by the object’s `photoUrl` field. 401 | photoUrl: StringFilter 402 | 403 | # Filter by the object’s `address` field. 404 | address: StringFilter 405 | 406 | # Checks for all expressions in this list. 407 | and: [ProfileFilter!] 408 | 409 | # Checks for any expressions in this list. 410 | or: [ProfileFilter!] 411 | 412 | # Negates the expression. 413 | not: ProfileFilter 414 | } 415 | 416 | # A filter to be used against Int fields. All fields are combined with a logical ‘and.’ 417 | input IntFilter { 418 | # Is null (if `true` is specified) or is not null (if `false` is specified). 419 | isNull: Boolean 420 | 421 | # Equal to the specified value. 422 | equalTo: Int 423 | 424 | # Not equal to the specified value. 425 | notEqualTo: Int 426 | 427 | # Not equal to the specified value, treating null like an ordinary value. 428 | distinctFrom: Int 429 | 430 | # Equal to the specified value, treating null like an ordinary value. 431 | notDistinctFrom: Int 432 | 433 | # Included in the specified list. 434 | in: [Int!] 435 | 436 | # Not included in the specified list. 437 | notIn: [Int!] 438 | 439 | # Less than the specified value. 440 | lessThan: Int 441 | 442 | # Less than or equal to the specified value. 443 | lessThanOrEqualTo: Int 444 | 445 | # Greater than the specified value. 446 | greaterThan: Int 447 | 448 | # Greater than or equal to the specified value. 449 | greaterThanOrEqualTo: Int 450 | } 451 | 452 | # The output of our create `Config` mutation. 453 | type CreateConfigPayload { 454 | # The exact same `clientMutationId` that was provided in the mutation input, 455 | # unchanged and unused. May be used by a client to track mutations. 456 | clientMutationId: String 457 | 458 | # The `Config` that was created by this mutation. 459 | config: Config 460 | 461 | # Our root query field type. Allows us to run any query from our mutation payload. 462 | query: Query 463 | 464 | # An edge for our `Config`. May be used by Relay 1. 
465 | configEdge( 466 | # The method to use when ordering `Config`. 467 | orderBy: [ConfigsOrderBy!] = [PRIMARY_KEY_ASC] 468 | ): ConfigsEdge 469 | } 470 | 471 | # All input for the create `Config` mutation. 472 | input CreateConfigInput { 473 | # An arbitrary string value with no semantic meaning. Will be included in the 474 | # payload verbatim. May be used to track mutations by the client. 475 | clientMutationId: String 476 | 477 | # The `Config` to be created by this mutation. 478 | config: ConfigInput! 479 | } 480 | 481 | # An input for mutations affecting `Config` 482 | input ConfigInput { 483 | key: String! 484 | value: String! 485 | isDefault: Boolean 486 | createdAt: Datetime 487 | updatedAt: Datetime 488 | } 489 | 490 | # The output of our create `Profile` mutation. 491 | type CreateProfilePayload { 492 | # The exact same `clientMutationId` that was provided in the mutation input, 493 | # unchanged and unused. May be used by a client to track mutations. 494 | clientMutationId: String 495 | 496 | # The `Profile` that was created by this mutation. 497 | profile: Profile 498 | 499 | # Our root query field type. Allows us to run any query from our mutation payload. 500 | query: Query 501 | 502 | # An edge for our `Profile`. May be used by Relay 1. 503 | profileEdge( 504 | # The method to use when ordering `Profile`. 505 | orderBy: [ProfilesOrderBy!] = [PRIMARY_KEY_ASC] 506 | ): ProfilesEdge 507 | } 508 | 509 | # All input for the create `Profile` mutation. 510 | input CreateProfileInput { 511 | # An arbitrary string value with no semantic meaning. Will be included in the 512 | # payload verbatim. May be used to track mutations by the client. 513 | clientMutationId: String 514 | 515 | # The `Profile` to be created by this mutation. 516 | profile: ProfileInput! 517 | } 518 | 519 | # An input for mutations affecting `Profile` 520 | input ProfileInput { 521 | id: Int 522 | userId: String! 523 | email: String 524 | displayName: String 525 | photoUrl: String 526 | address: String 527 | } 528 | 529 | # The output of our update `Config` mutation. 530 | type UpdateConfigPayload { 531 | # The exact same `clientMutationId` that was provided in the mutation input, 532 | # unchanged and unused. May be used by a client to track mutations. 533 | clientMutationId: String 534 | 535 | # The `Config` that was updated by this mutation. 536 | config: Config 537 | 538 | # Our root query field type. Allows us to run any query from our mutation payload. 539 | query: Query 540 | 541 | # An edge for our `Config`. May be used by Relay 1. 542 | configEdge( 543 | # The method to use when ordering `Config`. 544 | orderBy: [ConfigsOrderBy!] = [PRIMARY_KEY_ASC] 545 | ): ConfigsEdge 546 | } 547 | 548 | # All input for the `updateConfig` mutation. 549 | input UpdateConfigInput { 550 | # An arbitrary string value with no semantic meaning. Will be included in the 551 | # payload verbatim. May be used to track mutations by the client. 552 | clientMutationId: String 553 | 554 | # The globally unique `ID` which will identify a single `Config` to be updated. 555 | nodeId: ID! 556 | 557 | # An object where the defined keys will be set on the `Config` being updated. 558 | configPatch: ConfigPatch! 559 | } 560 | 561 | # Represents an update to a `Config`. Fields that are set will be updated. 562 | input ConfigPatch { 563 | key: String 564 | value: String 565 | isDefault: Boolean 566 | createdAt: Datetime 567 | updatedAt: Datetime 568 | } 569 | 570 | # All input for the `updateConfigByKey` mutation. 
571 | input UpdateConfigByKeyInput { 572 | # An arbitrary string value with no semantic meaning. Will be included in the 573 | # payload verbatim. May be used to track mutations by the client. 574 | clientMutationId: String 575 | 576 | # An object where the defined keys will be set on the `Config` being updated. 577 | configPatch: ConfigPatch! 578 | key: String! 579 | } 580 | 581 | # The output of our update `Profile` mutation. 582 | type UpdateProfilePayload { 583 | # The exact same `clientMutationId` that was provided in the mutation input, 584 | # unchanged and unused. May be used by a client to track mutations. 585 | clientMutationId: String 586 | 587 | # The `Profile` that was updated by this mutation. 588 | profile: Profile 589 | 590 | # Our root query field type. Allows us to run any query from our mutation payload. 591 | query: Query 592 | 593 | # An edge for our `Profile`. May be used by Relay 1. 594 | profileEdge( 595 | # The method to use when ordering `Profile`. 596 | orderBy: [ProfilesOrderBy!] = [PRIMARY_KEY_ASC] 597 | ): ProfilesEdge 598 | } 599 | 600 | # All input for the `updateProfile` mutation. 601 | input UpdateProfileInput { 602 | # An arbitrary string value with no semantic meaning. Will be included in the 603 | # payload verbatim. May be used to track mutations by the client. 604 | clientMutationId: String 605 | 606 | # The globally unique `ID` which will identify a single `Profile` to be updated. 607 | nodeId: ID! 608 | 609 | # An object where the defined keys will be set on the `Profile` being updated. 610 | profilePatch: ProfilePatch! 611 | } 612 | 613 | # Represents an update to a `Profile`. Fields that are set will be updated. 614 | input ProfilePatch { 615 | id: Int 616 | userId: String 617 | email: String 618 | displayName: String 619 | photoUrl: String 620 | address: String 621 | } 622 | 623 | # All input for the `updateProfileById` mutation. 624 | input UpdateProfileByIdInput { 625 | # An arbitrary string value with no semantic meaning. Will be included in the 626 | # payload verbatim. May be used to track mutations by the client. 627 | clientMutationId: String 628 | 629 | # An object where the defined keys will be set on the `Profile` being updated. 630 | profilePatch: ProfilePatch! 631 | id: Int! 632 | } 633 | 634 | # The output of our delete `Config` mutation. 635 | type DeleteConfigPayload { 636 | # The exact same `clientMutationId` that was provided in the mutation input, 637 | # unchanged and unused. May be used by a client to track mutations. 638 | clientMutationId: String 639 | 640 | # The `Config` that was deleted by this mutation. 641 | config: Config 642 | deletedConfigId: ID 643 | 644 | # Our root query field type. Allows us to run any query from our mutation payload. 645 | query: Query 646 | 647 | # An edge for our `Config`. May be used by Relay 1. 648 | configEdge( 649 | # The method to use when ordering `Config`. 650 | orderBy: [ConfigsOrderBy!] = [PRIMARY_KEY_ASC] 651 | ): ConfigsEdge 652 | } 653 | 654 | # All input for the `deleteConfig` mutation. 655 | input DeleteConfigInput { 656 | # An arbitrary string value with no semantic meaning. Will be included in the 657 | # payload verbatim. May be used to track mutations by the client. 658 | clientMutationId: String 659 | 660 | # The globally unique `ID` which will identify a single `Config` to be deleted. 661 | nodeId: ID! 662 | } 663 | 664 | # All input for the `deleteConfigByKey` mutation. 665 | input DeleteConfigByKeyInput { 666 | # An arbitrary string value with no semantic meaning. 
Will be included in the 667 | # payload verbatim. May be used to track mutations by the client. 668 | clientMutationId: String 669 | key: String! 670 | } 671 | 672 | # The output of our delete `Profile` mutation. 673 | type DeleteProfilePayload { 674 | # The exact same `clientMutationId` that was provided in the mutation input, 675 | # unchanged and unused. May be used by a client to track mutations. 676 | clientMutationId: String 677 | 678 | # The `Profile` that was deleted by this mutation. 679 | profile: Profile 680 | deletedProfileId: ID 681 | 682 | # Our root query field type. Allows us to run any query from our mutation payload. 683 | query: Query 684 | 685 | # An edge for our `Profile`. May be used by Relay 1. 686 | profileEdge( 687 | # The method to use when ordering `Profile`. 688 | orderBy: [ProfilesOrderBy!] = [PRIMARY_KEY_ASC] 689 | ): ProfilesEdge 690 | } 691 | 692 | # All input for the `deleteProfile` mutation. 693 | input DeleteProfileInput { 694 | # An arbitrary string value with no semantic meaning. Will be included in the 695 | # payload verbatim. May be used to track mutations by the client. 696 | clientMutationId: String 697 | 698 | # The globally unique `ID` which will identify a single `Profile` to be deleted. 699 | nodeId: ID! 700 | } 701 | 702 | # All input for the `deleteProfileById` mutation. 703 | input DeleteProfileByIdInput { 704 | # An arbitrary string value with no semantic meaning. Will be included in the 705 | # payload verbatim. May be used to track mutations by the client. 706 | clientMutationId: String 707 | id: Int! 708 | } 709 | --------------------------------------------------------------------------------
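For orientation, below is a minimal sketch of how the `GraphQLMixin` from `shared/mixins/graphql.mixin.ts` might be consumed by a Moleculer service. It is an illustration under stated assumptions, not a copy of the repository's actual `packages/backend/services/profile/profile-gql.svc.ts`: the service name, the `Profile` table options, and the disabled delete mutation are invented for the example, and the generated handlers also expect a `this.model` Prisma delegate to be supplied elsewhere (for instance by the Prisma mixin), which the sketch does not show.

```ts
// Hypothetical usage sketch; the real profile-gql service in this repo may differ.
import { ServiceSchema } from 'moleculer'
import { GraphQLMixin } from 'shared/mixins/graphql.mixin'

// The table name drives the generated GraphQL names: allProfiles, profileById,
// profile, createProfile and updateProfileById, matching the Postgraphile-style
// naming visible in graphql/schema-dev.graphql.
const ProfileGqlService: ServiceSchema = {
  name: 'profile-gql',
  mixins: [
    GraphQLMixin({
      name: 'profile-gql',
      // `primary` maps each primary-key column to its GraphQL scalar type.
      table: { name: 'Profile', primary: { id: 'Int' } },
      actions: {
        // Example per-action override: skip generating the delete mutation.
        deleteProfileById: { disabled: true },
      },
    }),
  ],
}

export default ProfileGqlService
```

With these options the mixin registers the remaining query and mutation actions, and whether they are exposed over HTTP then depends on the Apollo server mixin and the API gateway service defined elsewhere in the backend package.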