`export * from "./${filePath.split("src/")[1].replace(".ts","")}"\n`)).join("")}exports.GenerateEntityIndexFile=GenerateEntityIndexFile,GenerateEntityIndexFile.paths=[["generate-entity-index-file"]],GenerateEntityIndexFile.usage=clipanion_1.Command.Usage({category:"generators",description:"This script will generate index file for the model library.",examples:[["A basic example","npm run stator-cli generate-entity-index-file"]]})},28:(__unused_webpack_module,exports,__webpack_require__)=>{Object.defineProperty(exports,"__esModule",{value:!0}),exports.RenameProject=void 0;const tslib_1=__webpack_require__(752),fs_1=(0,tslib_1.__importDefault)(__webpack_require__(147)),path_1=(0,tslib_1.__importDefault)(__webpack_require__(17)),clipanion_1=__webpack_require__(638),lodash_1=__webpack_require__(517),utils_1=__webpack_require__(733);class RenameProject extends clipanion_1.Command{constructor(){super(...arguments),this.organization=clipanion_1.Option.String("--organization",{required:!0}),this.project=clipanion_1.Option.String("--project",{required:!0})}async execute(){await this.renameProject()}async renameProject(){try{/^[a-zA-Z-\d_]+$/gim.test(this.organization)||(console.error("The organization name must respect this regex /^[a-zA-Z-\\d_]+$/gmi"),process.exit(1));/^[a-zA-Z-\d_]+$/gim.test(this.project)||(console.error("The project name must respect this regex /^[a-zA-Z-\\d_]+$/gmi"),process.exit(1));const databaseName=this.project.replace(/-/g,"_"),databaseFiles=["docker-compose.yml","seed-data.js","init.sql","test.ts","orm-config.ts"],camelCaseProjectName=(0,lodash_1.camelCase)(this.project),ignoredFolders=["node_modules","dist",".git",".idea",".cache"];for await(const entry of(0,utils_1.walk)(path_1.default.join(__dirname,"../"),ignoredFolders)){if((await fs_1.default.promises.lstat(entry)).isFile()){const fileContent=await fs_1.default.promises.readFile(entry,"utf-8");if(fileContent){const 
isDatabaseFile=databaseFiles.some((databaseFile=>entry.includes(databaseFile))),replacedFileContent=fileContent.replace(/chocolat-chaud-io/gim,this.organization).replace(/stator/gim,isDatabaseFile?databaseName:camelCaseProjectName);await fs_1.default.promises.writeFile(entry,replacedFileContent,"utf-8")}}}console.info("This is now YOUR project provided generously by:\n\n███████ ████████ █████ ████████ ██████ ██████ \n██ ██ ██ ██ ██ ██ ██ ██ ██ \n███████ ██ ███████ ██ ██ ██ ██████ \n ██ ██ ██ ██ ██ ██ ██ ██ ██ \n███████ ██ ██ ██ ██ ██████ ██ ██ \n \n ")}catch(error){console.error(error)}}}exports.RenameProject=RenameProject,RenameProject.paths=[["rename-project"]],RenameProject.usage=clipanion_1.Command.Usage({category:"getting-started",description:"This script will rename all occurrences of stator and chocolat-chaud with your own names.",examples:[["A basic example","npm run stator-cli rename-project --organization chocolat-chaud-io --project stator"]]})},733:(__unused_webpack_module,exports,__webpack_require__)=>{Object.defineProperty(exports,"__esModule",{value:!0}),exports.walk=void 0;const tslib_1=__webpack_require__(752),fs_1=(0,tslib_1.__importDefault)(__webpack_require__(147)),path_1=(0,tslib_1.__importDefault)(__webpack_require__(17));exports.walk=async function*(dir,ignoredPaths,walkedFolderNames=[]){for await(const directoryEntry of await fs_1.default.promises.opendir(dir)){const entryPath=path_1.default.join(dir,directoryEntry.name);directoryEntry.isDirectory()&&!ignoredPaths.includes(directoryEntry.name)?(walkedFolderNames.push(entryPath),yield*(0,exports.walk)(entryPath,ignoredPaths,walkedFolderNames)):directoryEntry.isFile()&&(yield entryPath)}}},638:module=>{"use strict";module.exports=require("clipanion")},517:module=>{"use strict";module.exports=require("lodash")},897:module=>{"use strict";module.exports=require("lodash/camelCase")},969:module=>{"use strict";module.exports=require("lodash/capitalize")},546:module=>{"use 
strict";module.exports=require("lodash/kebabCase")},752:module=>{"use strict";module.exports=require("tslib")},147:module=>{"use strict";module.exports=require("fs")},17:module=>{"use strict";module.exports=require("path")}},__webpack_module_cache__={};function __webpack_require__(moduleId){var cachedModule=__webpack_module_cache__[moduleId];if(void 0!==cachedModule)return cachedModule.exports;var module=__webpack_module_cache__[moduleId]={exports:{}};return __webpack_modules__[moduleId](module,module.exports,__webpack_require__),module.exports}var __webpack_exports__={};(()=>{var exports=__webpack_exports__;Object.defineProperty(exports,"__esModule",{value:!0});const clipanion_1=__webpack_require__(638),enforce_file_folder_naming_convention_1=__webpack_require__(857),enforce_valid_imports_api_1=__webpack_require__(351),generate_cache_key_file_1=__webpack_require__(744),generate_entity_index_file_1=__webpack_require__(999),rename_project_1=__webpack_require__(28),[,,...args]=process.argv,cli=new clipanion_1.Cli({binaryLabel:"stator-cli",binaryName:"npm run stator-cli",binaryVersion:"1.0.0"});cli.register(rename_project_1.RenameProject),cli.register(generate_cache_key_file_1.GenerateCacheKeyFile),cli.register(generate_entity_index_file_1.GenerateEntityIndexFile),cli.register(enforce_valid_imports_api_1.EnforceValidImportsApi),cli.register(enforce_file_folder_naming_convention_1.EnforceFileFolderNamingConvention),cli.register(clipanion_1.Builtins.HelpCommand),cli.runExit(args).catch(console.error)})();var __webpack_export_target__=exports;for(var i in __webpack_exports__)__webpack_export_target__[i]=__webpack_exports__[i];__webpack_exports__.__esModule&&Object.defineProperty(__webpack_export_target__,"__esModule",{value:!0})})();
2 | //# sourceMappingURL=main.js.map
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
Stator
3 |
4 |
5 | Stator, your go-to template for the perfect stack.
6 |
7 |
8 |
9 |
44 |
45 |
46 |
47 | ## 🚀 Quick Start
48 |
49 | The interactive CLI will guide you to easily setup your project.
50 |
51 | ```
52 | npm run get-started
53 | ```
54 |
55 |
56 |
57 | ## 📋 Table of Contents
58 |
59 | - [About the Project](#-about-the-project)
60 | - [Demo Application](#-demo-application)
61 | - [Technical Stack](#technical-stack)
62 | - [Getting Started](#-getting-started)
63 | - [Prerequisites](#prerequisites)
64 | - [Copy the template](#copy-the-template)
65 | - [Make it yours](#make-it-yours)
66 | - [Run the application](#run-the-application)
67 | - [Continuous Integration](#continuous-integration)
68 | - [Deployment](#deployment)
69 | - [Digital Ocean App Platform](#digital-ocean-app-platform)
70 | - [Kubernetes](#kubernetes)
71 | - [Implementation](#%EF%B8%8F-implementation)
72 | - [Database](#database)
73 | - [Postgres](#postgres)
74 | - [Mongo](#mongo-not-recommended)
75 | - [Data seeding](#data-seeding)
76 | - [Backend](#backend)
77 | - [Frontend](#frontend)
78 | - [General](#general)
79 |
80 |
81 |
82 | ## 📚 About the Project
83 |
84 | Have you ever started a new project by yourself?
85 | If so, you probably know that it is tedious to set up all the necessary tools.
86 | Just like you, the part I enjoy the most is coding, not boilerplate.
87 |
88 | Say hi to stator, a full-stack [TypeScript](https://github.com/microsoft/TypeScript) template that enforces conventions, handles releases, automates deployments and much more!
89 |
90 | If you want more details about how this idea was implemented, I recommend reading the [series of blog articles](https://yann510.hashnode.dev/creating-the-modern-developer-stack-template-part-1-ckfl56axy02e85ds18pa26a6z) I wrote on the topic.
91 |
92 |
93 |
94 | ## 🦄 [Demo Application](https://www.stator.dev)
95 |
96 | This template includes a demo **todo application** that serves as an example of sound patterns.
97 | Of course, you won't be creating a todo application for your project, but you can use this as an example of useful patterns and learn how to use the technologies presented in this project.
98 |
99 | 
100 |
101 | ### Technical Stack
102 |
103 | For a detailed list of all those technologies, you can read this [blog article](https://yann510.hashnode.dev/stator-a-full-stack-template-releases-deployments-enforced-conventions-ckhmnyhr903us9ms1b20lgi3b).
104 |
105 | | Deployment | Database | Backend | Frontend | Testing | Conventions |
106 | | -------------------------------------------------------------------------------- | ------------------------------------------------ | ------------------------------------------------------------- | ------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- |
107 | | [DigitalOcean App Platform](https://www.digitalocean.com/products/app-platform/) | [Postgres](https://github.com/postgres/postgres) | [Nest](https://github.com/nestjs/nest) | [React](https://github.com/facebook/react) | [jest](https://github.com/facebook/jest) | [commitlint](https://github.com/conventional-changelog/commitlint) |
108 | | [semantic-release](https://github.com/semantic-release/semantic-release) | [Mongo](https://github.com/mongodb/mongo) | [Fastify](https://github.com/fastify/fastify) | [React Router](https://github.com/ReactTraining/react-router) | [cypress](https://github.com/cypress-io/cypress) | [eslint](https://github.com/eslint/eslint) |
109 | | [docker-compose](https://github.com/docker/compose) | [TypeORM](https://github.com/typeorm/typeorm) | [Swagger](https://github.com/nestjs/swagger) | [Redux](https://github.com/reduxjs/redux) | | [prettier](https://github.com/prettier/prettier) |
110 | | | [NestJs CRUD](https://github.com/nestjsx/crud) | [ReDoc](https://github.com/Redocly/redoc) | [Redux Toolkit](https://github.com/reduxjs/redux-toolkit) | | |
111 | | | | | [Material UI](https://github.com/mui-org/material-ui) | | |
112 |
113 |
114 |
115 | ## 💥 Getting Started
116 |
117 | ### Prerequisites
118 |
119 | - [Docker Compose](https://docs.docker.com/compose/install/)
120 | - [node.js](https://nodejs.org/en/download/) v14.x
121 |
122 | ### Copy the template
123 |
124 | This repository is a repository template, which means you can use the `Use this template` button at the top to create your project based on this.
125 |
126 | 
127 |
128 | \*Note: If you have an existing repository, this will require more work. I would recommend using the `Use this template` button and migrating your current code to the newly created project.
129 |
130 | ### Make it yours
131 |
132 | You will now want to make this project yours by replacing all organization and project naming occurrences with your own names.
133 | Thankfully, we have a script just for that:
134 |
135 | ```
136 | npm run rename-project -- --organization {YOUR_ORGANIZATION_NAME} --project {YOUR_PROJECT_NAME}
137 | ```
138 |
139 | \*Note: I highly recommend that the project name is the same as your git repository.
140 |
141 | On completion, you will see the following message:
142 |
143 | 
144 |
145 | ### Run the application
146 |
147 | First, install the dependencies:
148 |
149 | ```
150 | npm i
151 | ```
152 |
153 | Then, run the whole stack:
154 |
155 | ```
156 | npm run postgres
157 | ```
158 |
159 | ```
160 | npm start api
161 | ```
162 |
163 | ```
164 | npm start webapp
165 | ```
166 |
167 | Finally, why not test it:
168 |
169 | ```
170 | npm test api && npm run e2e webapp-e2e
171 | ```
172 |
173 | For a full list of available commands, consult the `package.json`.
174 |
175 | ### Continuous Integration
176 |
177 | This template integrates GitHub Actions for its Continuous Integration. The existing workflows are under `.github/workflows`.
178 | Currently, the CI will ensure all your apps work properly by building and testing them.
179 | For your pull requests, it will create a review application, which hosts your whole stack on a VM.
180 | Once everything is ready, a new comment will be added to your pull request with the deployment URL.
181 | When the PR is closed, your review app will be destroyed, as its purpose will have been served.
182 | Its sacrifice will be for the greater good — and also your wallet.
183 | To have the CI working, you must:
184 |
185 | 1. (Optional) If you want review apps to work, you should follow the instruction provided by the `get-started` CLI.
186 | 2. (Optional) Link your repository with [Codecov](https://github.com/apps/codecov) by inserting your `CODECOV_TOKEN` in github secrets.
187 | 3. (Optional) Insert your [Nx Cloud](https://nx.app/) access token in github secrets under `NX_CLOUD_TOKEN`. This enables for caching and faster build times.
188 |
189 | ### Deployment
190 |
191 | The application can be deployed in two different ways, depending on your objectives.
192 |
193 | #### Digital Ocean App Platform
194 |
195 | For a simple and fast deployment, the new [App Platform](https://www.digitalocean.com/docs/app-platform/) from Digital Ocean makes it easy to work with monorepos. For our todo app, the config file lies under `.do/app.yaml`. There, you can change the configuration of the different apps being deployed. [The spec can be found here.](https://www.digitalocean.com/docs/app-platform/references/app-specification-reference/)
196 |
197 | To deploy this full stack application yourself, follow the steps below:
198 |
199 | 1. Create an account on [Digital Ocean Cloud](https://m.do.co/c/67f72eccb557) (this is a sponsored link) and enable Github access
200 | 1. Install [doctl CLI](https://www.digitalocean.com/docs/apis-clis/doctl/how-to/install/)
201 | 1. Run `doctl apps create --spec .do/app.yaml`
202 | 1. View the build, logs, and deployment url [here](https://cloud.digitalocean.com/apps)
203 |
204 | Once done, your app will be hooked to master branch commits as defined in the spec. Therefore, on merge, the application will update. To update the spec of the application, first get the application id with `doctl apps list`, then simply run `doctl apps update --spec .do/app.yaml`.
205 |
206 |
207 |
208 | ## ⚙️ Implementation
209 |
210 | ### Database
211 |
212 | #### Postgres
213 |
214 | There are two databases available: postgres and mongo.
215 | To ensure your developers don't get into any trouble while installing those, they are already pre-configured with `docker-compose.yml` files.
216 |
217 | **By default, the project uses postgres.**
218 | If this is what you want, you're good to go; everything will work out of the box.
219 |
220 | #### Migrations
221 |
222 | By default, the automatic synchronization is activated between your models and the database.
223 | This means that making changes on your models will be automatically reflected on your database schemas.
224 | If you would like to control your migrations manually, you can do so by setting `synchronize` to false in `orm-config.ts` file.
225 |
226 | Generate migration from your modified schemas:
227 |
228 | ```
229 | npm run typeorm -- migration:generate -n {MIGRATION_NAME}
230 | ```
231 | This will check the difference between models for your defined entities and your database schemas.
232 | If it finds changes, it will generate the appropriate migration scripts.
233 |
234 | Run all pending migrations:
235 |
236 | ```
237 | npm run typeorm -- migration:run
238 | ```
239 |
240 | To get all the information on migrations, consult [typeorm documentation](https://github.com/typeorm/typeorm/blob/master/docs/migrations.md).
241 |
242 | #### Mongo [NOT RECOMMENDED]
243 |
244 | If you would like to use mongodb, even though it is absolutely not recommended because it currently doesn't work well with [typeorm](https://github.com/typeorm/typeorm), you can still do that by updating the connection info under `./apps/api/src/config/configuration.ts`.
245 | You simply need to replace `type: "postgres"` with `type: "mongo"`.
246 | Make sure you run the mongo container using the command: `npm run mongo`.
247 |
248 | #### Data seeding
249 |
250 | If you want your database to be pre-populated with data, it is very easy to do so.
251 | For postgres add your `sql` statements to `apps/database/postgres/init.sql` file.
252 | For mongo add your mongo statements to `apps/database/mongo/mongo-entrypoint/seed-data.js` file.
253 |
254 | ### Backend
255 |
256 | We are using cutting edge technologies to ensure that you get the best development experience one could hope for.
257 | To communicate with the database, we make use of the great [typeorm](https://github.com/typeorm/typeorm).
258 | We use the code-first approach, which means defining your models will also represent your tables in your database.
259 | Here is an example:
260 |
261 | ```typescript
262 | import { Column, Entity } from "typeorm"
263 | import { RootEntity } from "./root.entity"
264 | import { MinLength } from "class-validator"
265 |
266 | @Entity()
267 | export class Todo extends RootEntity {
268 | @Column()
269 | @MinLength(5, { always: true })
270 | text: string
271 | }
272 | ```
273 |
274 | To serve your API requests, we make use of [nest](https://github.com/nestjs/nest) alongside with [fastify](https://github.com/fastify/fastify) to ensure blazing fast [performance](https://github.com/fastify/fastify#benchmarks).
275 |
276 | To reduce the boilerplate commonly found around creating a new entity, we are using the [nestjsx/crud](https://github.com/nestjsx/crud) plugin that will generate all necessary routes for CRUD operations.
277 |
278 | Here is an example from our todo app:
279 |
280 | ```typescript
281 | import { Controller } from "@nestjs/common"
282 | import { Crud, CrudController } from "@nestjsx/crud"
283 | import { Todo } from "@stator/models"
284 |
285 | import { TodosService } from "./todos.service"
286 |
287 | @Crud({ model: { type: Todo } })
288 | @Controller("todos")
289 | export class TodosController implements CrudController {
290 | constructor(public service: TodosService) {}
291 | }
292 | ```
293 |
294 | Of course, you're probably wondering if this actually works.
295 | To convince you, we have implemented integration tests that perform real requests using [supertest](https://github.com/visionmedia/supertest).
296 |
297 | **Can I view the generated endpoints?** Well, of course, you can!
298 |
299 | We now have generated [swagger documentation](https://github.com/fastify/fastify-swagger) that is viewable with the beautiful [redoc](https://github.com/Redocly/redoc).
300 |
301 | Once you navigate to [localhost:3333](http://localhost:3333), you will see this:
302 |
303 | 
304 |
305 | ### Frontend
306 |
307 | For our webapp, we're using the very popular [react](https://github.com/facebook/react) alongside [redux-toolkit](https://github.com/reduxjs/redux-toolkit) and [react-router](https://github.com/ReactTraining/react-router).
308 | We highly recommend that you use [function components](https://reactjs.org/docs/components-and-props.html) as demonstrated in the example.
309 |
310 | To further reduce the necessary boilerplate, you can generate hooks based on your API swagger by running `npm run generate-api-redux`.
311 | When you add new entities to your API, you should also add them in the output file property of the `tools/generators/open-api-config.ts` file.
312 | If you would like to avoid this, you can generate a single file by removing both properties: `outputFiles` and `filterEndpoints`.
313 |
314 | This script will generate the required [RTK Query](https://redux-toolkit.js.org/rtk-query/overview) code and caching keys so your data remains up to date while performing CRUD operations.
315 |
316 | For a complete example of CRUD operations, consult the `apps/webapp/src/pages/todos-page.tsx` file.
317 |
318 | In our example, we are using [material-ui](https://github.com/mui-org/material-ui), but you could replace that with any other framework.
319 |
320 | We also use [axios](https://github.com/axios/axios) to simplify our requests handling as it works very well with TypeScript.
321 |
322 | ### General
323 |
324 | We strongly believe that typing helps create a more robust program; thus, we use [TypeScript](https://github.com/microsoft/TypeScript).
325 |
326 | To facilitate and optimize the usage of the monorepo, we make use of [NX](https://github.com/nrwl/nx).
327 |
328 | [eslint](https://github.com/eslint/eslint) enforces excellent standards, and [prettier](https://github.com/prettier/prettier) helps you apply them.
329 |
330 | Commit messages must abide by these [guidelines](https://www.conventionalcommits.org/en/v1.0.0/). If you need help following them, simply run `npm run commit` and you will be prompted with an interactive menu.
331 |
332 | File and directory names are enforced by the custom-made `enforce-file-folder-naming-convention.ts`.
333 |
334 | Branch names are enforced before you even commit to ensure everyone adopts the same standard: `{issue-number}-{branch-work-title-kebab-case}`.
335 |
336 | For end-to-end testing, we use the notorious [cypress](https://github.com/cypress-io/cypress).
337 |
338 | We also have a pre-built CI toolkit for you that will build and run the tests.
339 |
--------------------------------------------------------------------------------
/cli/main.js.map:
--------------------------------------------------------------------------------
1 | {"version":3,"file":"main.js","mappings":"yMAAA,U,yBAAA,0CAEA,qCAEA,iCAEA,MAAaA,0CAA0C,YAAAC,QASrDC,gBACE,MAAMC,aAAe,CACnB,eACA,OACA,OACA,QACA,WACA,YACA,SACA,SACA,UACA,eACA,cACA,cAEIC,mBAAqB,UACrBC,eAAiB,GAEvB,SAASC,kBAAkBC,OACzB,MAAMC,UAAY,eAAKC,SAASF,OAAOG,QAAQ,YAAa,IACxDF,UAAUG,OAAS,IAAMR,aAAaS,SAASJ,YAAcA,UAAUK,MAAMT,qBAC/EC,eAAeS,KAAKP,OAIxB,MAAMQ,YAAc,GACpB,UAAW,MAAMR,SAAS,UAAAS,MAAK,eAAKC,KAAKC,UAAW,MAAOf,aAAcY,aACvET,kBAAkBC,OAGpB,IAAK,MAAMY,cAAcJ,YACvBT,kBAAkBa,YAGpB,GAAId,eAAeM,OAAS,EAAG,CAC7B,MAAMS,aAAe,GAAGf,eAAeM,8EAEvCU,QAAQC,MAAMF,cACdC,QAAQC,MAAMjB,gBAEdkB,QAAQC,KAAK,GAGfH,QAAQI,KAAK,wEApDjB,4EACS,kCAAAC,MAAQ,CAAC,CAAC,0CAEV,kCAAAC,MAAQ,YAAA1B,QAAQ2B,MAAM,CAC3BC,SAAU,YACVC,YAAa,wEACbC,SAAU,CAAC,CAAC,kBAAmB,kD,gMCZnC,2DACA,4DAEA,qCAEA,iCAEA,MAAaC,+BAA+B,YAAA/B,QAU1CC,gBACE,MAAM+B,mBAAqB,iCACrBC,6BAA+B,GAErChC,eAAeI,kBAAkBC,cACL,aAAG4B,SAASC,SAAS7B,MAAO,CAAE8B,SAAU,WACxCxB,MAAMoB,qBAE9BC,6BAA6BpB,KAAKP,OAItC,UAAW,MAAMA,SAAS,UAAAS,MAAK,eAAKC,KAAKC,UAAW,mBAAoB,UAChEZ,kBAAkBC,OAG1B,GAAI2B,6BAA6BvB,OAAS,EAAG,CAC3C,MAAMS,aAAe,GAAGc,6BAA6BvB,yGAErDU,QAAQC,MAAMF,cACdC,QAAQC,MAAMY,8BAEdX,QAAQC,KAAK,GAGfH,QAAQI,KAAK,wDAnCjB,sDACS,uBAAAC,MAAQ,CAAC,CAAC,8BAEV,uBAAAC,MAAQ,YAAA1B,QAAQ2B,MAAM,CAC3BC,SAAU,YACVC,YACE,mIACFC,SAAU,CAAC,CAAC,kBAAmB,oD,8LCdnC,2DACA,4DAEA,qCACA,kEACA,mEACA,kEAEA,iCAEA,MAAaO,6BAA6B,YAAArC,QASxCC,gBACE,MAAMqC,cAAgB,eAAKtB,KAAKC,UAAW,sCACrCsB,iBAAmB,GACnBC,UAAY,GAClB,IAAIC,iBAAmB,8HAQvB,UAAW,MAAMC,YAAY,UAAA3B,MAAKuB,cAAe,IAAK,CAEpD,GADwB,aAAGK,UAAUD,UAAUE,UAAYF,SAAS/B,SAAS,cACxD,CACnB,MAAMkC,UAAW,uBAAU,eAAKrC,SAASkC,SAAU,OAAOjC,QAAQ,aAAc,KAChF+B,UAAU3B,KAAKgC,UACf,MAAMC,uBAAyB,qDAEzBC,sBAAwB,6BACxBC,cAAgB,IAFQ,aAAGC,aAAaP,SAAU,CAAEN,SAAU,SAAUxB,MAAMkC,wBAAwB,GAE5DI,SAASH,wBAAwBI,KAAIC,SAAW,CAACA,QAAQ,MAAKC,OAE1GL,cAActC,OAAS,IACzB6B,iBAAiB1B,KAAK,YAAYgC,0BAAyB,uBAAUA,wBACrEJ,kBAAoB,oBAAmB,wBAAWI,iCACxDA,qDAEFG,cACCG,KAAIG,eACH,MAAMC,eAAiBD,aAAa3C,SAAS,OAAS,eAAiB,kBACvE,MAAO,SAAS2C,mBAAmBC,qBAAqBV,mBAEzD7B,KAAK,0BAOJyB,iBAAmBA,iBAAiBhC,QAA
Q,oBAAqB8B,iBAAiBY,KAAIK,iBAAmBA,kBAAiBxC,KAAK,OAC/HyB,kBAAoB,mDACpBD,UAAUW,KAAIN,UAAY,OAAM,wBAAWA,yBAAwB7B,KAAK,aAGxE,aAAGyC,cAAc,GAAGnB,wCAAyCG,iBAAkB,CAAEL,SAAU,SAC3FhB,QAAQI,KAAK,aAAac,0CAtD9B,kDACS,qBAAAb,MAAQ,CAAC,CAAC,4BAEV,qBAAAC,MAAQ,YAAA1B,QAAQ2B,MAAM,CAC3BC,SAAU,aACVC,YAAa,gFACbC,SAAU,CAAC,CAAC,kBAAmB,kD,iMChBnC,2DACA,4DACA,iCACA,qCAEA,MAAa4B,gCAAgC,YAAA1D,QAS3CC,gBACE,MAAM0D,wBAA0B,eAAK3C,KAAKC,UAAW,yBAC/C2C,cAAgB,eAAK5C,KAAKC,UAAW,+BACrC4C,kBAAoB,GAE1B,UAAW,MAAMvD,SAAS,UAAAS,MAAK,eAAKC,KAAKC,UAAW,0BAA2B,IAAK,CAClF,MAAM6C,OAASxD,MAAMyD,MAAM,QAAQ,GAAGA,MAAM,KAAK,GAE5CF,kBAAkBC,UACrBD,kBAAkBC,QAAU,IAE9BD,kBAAkBC,QAAQjD,KAAKP,OAGjC,IAAI0D,iBAAmB,oNAKvB,MAAMC,cAAgBC,OAAOd,QAAQS,mBAClCM,OACAC,QAAO,CAACC,WAAYC,IAAKC,UAAW,IAAMF,UAAW,CAACC,KAAMC,SAAU,IACzE,IAAK,MAAOT,OAAQU,aAAcN,OAAOd,QAAQa,eAC/CD,kBAAoB,MAAMF,WAC1BE,kBAAoBS,4BAA4BD,WAChDR,kBAAoB,KAGtB,MAAMU,0BAA4B,aAAGC,WAAWhB,yBAC1CiB,mBAAqBC,SACzBH,gCAAkC,aAAGxC,SAASC,SAASwB,wBAAyB,CAAEvB,SAAU,SAAY,IAEpG0C,YAUV,SAAkBC,KAChB,IACIC,EACAC,IAFAC,KAAO,EAIX,IAAKF,EAAI,EAAGA,EAAID,IAAIrE,OAAQsE,IAC1BC,IAAMF,IAAII,WAAWH,GACrBE,MAAQA,MAAQ,GAAKA,KAAOD,IAC5BC,MAAQ,EAEV,OAAOA,KApBeE,CAASpB,kBACzBY,qBAAuBE,oBACnB,aAAG5C,SAASmD,UAAU1B,wBAAyBmB,YAAYQ,WAAY,CAAElD,SAAU,eACnF,aAAGF,SAASmD,UAAUzB,cAAeI,iBAAkB,CAAE5B,SAAU,SAEzEhB,QAAQI,KAAK,oDAkBnB,SAASiD,4BAA4BD,WACnC,OAAOA,UACJL,OACAhB,KAAIoC,UAGI,oBAFkBA,SAASxB,MAAM,QAAQ,GAAGtD,QAAQ,MAAO,WAInEO,KAAK,IAxEV,wDACS,wBAAAS,MAAQ,CAAC,CAAC,+BAEV,wBAAAC,MAAQ,YAAA1B,QAAQ2B,MAAM,CAC3BC,SAAU,aACVC,YAAa,8DACbC,SAAU,CAAC,CAAC,kBAAmB,qD,sLCXnC,2DACA,4DAEA,qCACA,kCAEA,iCAEA,MAAa0D,sBAAsB,YAAAxF,QAAnC,c,oBAEE,KAAAyF,aAAe,YAAAC,OAAOC,OAAO,iBAAkB,CAAEC,UAAU,IAC3D,KAAAC,QAAU,YAAAH,OAAOC,OAAO,YAAa,CAAEC,UAAU,IAQjD3F,sBACQ6F,KAAKC,gBAGb9F,sBACE,IAC4B,qBACH+F,KAAKF,KAAKL,gBAC/BrE,QAAQC,MAAM,uEACdC,QAAQC,KAAK,IAGM,qBACHyE,KAAKF,KAAKD,WAC1BzE,QAAQC,MAAM,kEACdC,QAAQC,KAAK,IAEf,MAAM0E,aAAeH,KAAKD,QAAQpF,QAAQ,KAAM,KAC1CyF,cAAgB,CAAC,qBAAsB,eAAgB,WAAY,UAAW,iBAE9EC,sBAAuB,WAAAC,WAAUN,KAAKD,SAEtCQ,eAAiB,C
AAC,eAAgB,OAAQ,OAAQ,QAAS,UACjE,UAAW,MAAM/F,SAAS,UAAAS,MAAK,eAAKC,KAAKC,UAAW,OAAQoF,gBAAiB,CAE3E,UADwB,aAAGnE,SAASoE,MAAMhG,QAC5BsC,SAAU,CACtB,MAAM2D,kBAAoB,aAAGrE,SAASC,SAAS7B,MAAO,SACtD,GAAIiG,YAAa,CACf,MAAMC,eAAiBN,cAAcO,MAAKC,cAAgBpG,MAAMK,SAAS+F,gBACnEC,oBAAsBJ,YACzB9F,QAAQ,uBAAwBqF,KAAKL,cACrChF,QAAQ,YAAa+F,eAAiBP,aAAeE,4BAClD,aAAGjE,SAASmD,UAAU/E,MAAOqG,oBAAqB,WAK9DvF,QAAQI,KAAK,yXASb,MAAOH,OACPD,QAAQC,MAAMA,SA1DpB,oCACS,cAAAI,MAAQ,CAAC,CAAC,mBAIV,cAAAC,MAAQ,YAAA1B,QAAQ2B,MAAM,CAC3BC,SAAU,kBACVC,YAAa,4FACbC,SAAU,CAAC,CAAC,kBAAmB,2F,8KChBnC,2DACA,4DAEa,QAAAf,KAAOd,gBAAiB2G,IAAa1G,aAA6B2G,kBAA8B,IAC3G,UAAW,MAAMC,wBAAwB,aAAG5E,SAAS6E,QAAQH,KAAM,CACjE,MAAMI,UAAY,eAAKhG,KAAK4F,IAAKE,eAAeG,MAC5CH,eAAeI,gBAAkBhH,aAAaS,SAASmG,eAAeG,OACxEJ,kBAAkBhG,KAAKmG,kBAChB,UAAAjG,MAAKiG,UAAW9G,aAAc2G,oBAC5BC,eAAelE,iBAClBoE,c,0BCVZG,OAAOC,QAAUC,QAAQ,c,0BCAzBF,OAAOC,QAAUC,QAAQ,W,0BCAzBF,OAAOC,QAAUC,QAAQ,qB,0BCAzBF,OAAOC,QAAUC,QAAQ,sB,0BCAzBF,OAAOC,QAAUC,QAAQ,qB,0BCAzBF,OAAOC,QAAUC,QAAQ,U,0BCAzBF,OAAOC,QAAUC,QAAQ,O,yBCAzBF,OAAOC,QAAUC,QAAQ,UCCrBC,yBAA2B,GAG/B,SAASC,oBAAoBC,UAE5B,IAAIC,aAAeH,yBAAyBE,UAC5C,QAAqBE,IAAjBD,aACH,OAAOA,aAAaL,QAGrB,IAAID,OAASG,yBAAyBE,UAAY,CAGjDJ,QAAS,IAOV,OAHAO,oBAAoBH,UAAUL,OAAQA,OAAOC,QAASG,qBAG/CJ,OAAOC,Q,wHCrBf,2CAEA,iEACA,qDACA,mDACA,sDACA,0CAEO,CAAE,IAAKQ,MAAQtG,QAAQuG,KAExBC,IAAM,IAAI,YAAAC,IAAI,CAClBC,YAAa,aACbC,WAAY,qBACZC,cAAe,UAGjBJ,IAAIK,SAAS,iBAAA3C,eACbsC,IAAIK,SAAS,0BAAA9F,sBACbyF,IAAIK,SAAS,6BAAAzE,yBACboE,IAAIK,SAAS,4BAAApG,wBACb+F,IAAIK,SAAS,wCAAApI,mCACb+H,IAAIK,SAAS,YAAAC,SAASC,aACtBP,IAAIQ,QAAQV,MAAMW,MAAMnH,QAAQC,Q","sources":["webpack://stator/./apps/cli/src/commands/enforce-file-folder-naming-convention.ts","webpack://stator/./apps/cli/src/commands/enforce-valid-imports-api.ts","webpack://stator/./apps/cli/src/commands/generate-cache-key-file.ts","webpack://stator/./apps/cli/src/commands/generate-entity-index-file.ts","webpack://stator/./apps/cli/src/commands/rename-project.ts","webpack://stator/./apps/cli/src/utils.ts","webpack://
stator/external commonjs \"clipanion\"","webpack://stator/external commonjs \"lodash\"","webpack://stator/external commonjs \"lodash/camelCase\"","webpack://stator/external commonjs \"lodash/capitalize\"","webpack://stator/external commonjs \"lodash/kebabCase\"","webpack://stator/external commonjs \"tslib\"","webpack://stator/external node-commonjs \"fs\"","webpack://stator/external node-commonjs \"path\"","webpack://stator/webpack/bootstrap","webpack://stator/./apps/cli/src/main.ts"],"sourcesContent":["import path from \"path\"\n\nimport { Command } from \"clipanion\"\n\nimport { walk } from \"../utils\"\n\nexport class EnforceFileFolderNamingConvention extends Command {\n static paths = [[\"enforce-file-folder-naming-convention\"]]\n\n static usage = Command.Usage({\n category: \"enforcers\",\n description: \"This script will make sure that your folders and file use kebab-case.\",\n examples: [[\"A basic example\", \"npm run stator-cli generate-cache-key-file\"]],\n })\n\n async execute(): Promise {\n const ignoredPaths = [\n \"node_modules\",\n \"dist\",\n \".git\",\n \".idea\",\n \".gitkeep\",\n \".eslintrc\",\n \".cache\",\n \"README\",\n \"LICENSE\",\n \"CONTRIBUTING\",\n \"dockerfiles\",\n \"Dockerfile\",\n ]\n const capitalLetterRegex = /[A-Z]/gm\n const errorPathPaths = []\n\n function validateEntryName(entry) {\n const entryName = path.basename(entry).replace(/\\.[^/.]+$/, \"\")\n if (entryName.length > 0 && !ignoredPaths.includes(entryName) && entryName.match(capitalLetterRegex)) {\n errorPathPaths.push(entry)\n }\n }\n\n const folderNames = []\n for await (const entry of walk(path.join(__dirname, \"..\"), ignoredPaths, folderNames)) {\n validateEntryName(entry)\n }\n\n for (const folderName of folderNames) {\n validateEntryName(folderName)\n }\n\n if (errorPathPaths.length > 0) {\n const errorMessage = `${errorPathPaths.length} files/directories do not respect the kebab-case convention enforced.`\n\n console.error(errorMessage)\n 
console.error(errorPathPaths)\n\n process.exit(1)\n }\n\n console.info(\"Congratulations, all your files and directories are properly named!\")\n }\n}\n","import fs from \"fs\"\nimport path from \"path\"\n\nimport { Command } from \"clipanion\"\n\nimport { walk } from \"../utils\"\n\nexport class EnforceValidImportsApi extends Command {\n static paths = [[\"enforce-valid-imports-api\"]]\n\n static usage = Command.Usage({\n category: \"enforcers\",\n description:\n \"This script will make sure that your imports are valid in the API. This is used to avoid import errors than can be hard to spot.\",\n examples: [[\"A basic example\", \"npm run stator-cli enforce-valid-imports-api\"]],\n })\n\n async execute(): Promise {\n const invalidImportRegex = /import .*stator\\/[a-zA-Z]+\\//gm\n const fileContainingInvalidImports = []\n\n async function validateEntryName(entry) {\n const fileContent = await fs.promises.readFile(entry, { encoding: \"utf-8\" })\n const match = fileContent.match(invalidImportRegex)\n if (match) {\n fileContainingInvalidImports.push(entry)\n }\n }\n\n for await (const entry of walk(path.join(__dirname, \"../apps/api/src\"), [])) {\n await validateEntryName(entry)\n }\n\n if (fileContainingInvalidImports.length > 0) {\n const errorMessage = `${fileContainingInvalidImports.length} file(s) have invalid imports. 
They should NOT look like this: \"@stator/models/something/entity\"`\n\n console.error(errorMessage)\n console.error(fileContainingInvalidImports)\n\n process.exit(1)\n }\n\n console.info(\"Congratulations, all your imports in api are valid!\")\n }\n}\n","import fs from \"fs\"\nimport path from \"path\"\n\nimport { Command } from \"clipanion\"\nimport camelCase from \"lodash/camelCase\"\nimport capitalize from \"lodash/capitalize\"\nimport kebabCase from \"lodash/kebabCase\"\n\nimport { walk } from \"../utils\"\n\nexport class GenerateCacheKeyFile extends Command {\n static paths = [[\"generate-cache-key-file\"]]\n\n static usage = Command.Usage({\n category: \"generators\",\n description: \"This script will generate the required cache key files for your redux webapp.\",\n examples: [[\"A basic example\", \"npm run stator-cli generate-cache-key-file\"]],\n })\n\n async execute(): Promise {\n const endpointsPath = path.join(__dirname, \"../apps/webapp/src/redux/endpoints\")\n const importStatements = []\n const cacheKeys = []\n let cacheFileContent = `/**\n * This file was automatically generated by tools/generators/generate-cache-file.js file\n */\n\nIMPORT_STATEMENTS\n\n`\n\n for await (const pathName of walk(endpointsPath, [])) {\n const isEndpointsFile = fs.lstatSync(pathName).isFile() && pathName.includes(\"-endpoints\")\n if (isEndpointsFile) {\n const cacheKey = camelCase(path.basename(pathName, \".ts\").replace(\"-endpoints\", \"\"))\n cacheKeys.push(cacheKey)\n const endpointsSelectorRegex = /build => \\(({[\\s\\S]+overrideExisting: false,\\s+})/m\n const endpointsObjectString = fs.readFileSync(pathName, { encoding: \"utf8\" }).match(endpointsSelectorRegex)[1]\n const endpointSelectorRegex = /([a-z-A-Z]+): build.[qm]/gm\n const endpointNames = [...endpointsObjectString.matchAll(endpointSelectorRegex)].map(entries => [entries[1]]).flat()\n\n if (endpointNames.length > 0) {\n importStatements.push(`import { ${cacheKey}Api } from 
\"./${kebabCase(cacheKey)}-endpoints\"`)\n cacheFileContent += `export const add${capitalize(cacheKey)}CacheKeys = () =>\n ${cacheKey}Api.enhanceEndpoints({\n endpoints: {\n${endpointNames\n .map(endpointName => {\n const tagPropertyKey = endpointName.includes(\"get\") ? \"providesTags\" : \"invalidatesTags\"\n return ` ${endpointName}: { ${tagPropertyKey}: [\"${cacheKey}\"] },`\n })\n .join(\"\\n\")}\n },\n })\\n`\n }\n }\n }\n\n cacheFileContent = cacheFileContent.replace(\"IMPORT_STATEMENTS\", importStatements.map(importStatement => importStatement).join(\"\\n\"))\n cacheFileContent += `export const addGeneratedCacheKeys = () => {\n ${cacheKeys.map(cacheKey => `add${capitalize(cacheKey)}CacheKeys()`).join(\"\\n\")}\n}\\n`\n\n fs.writeFileSync(`${endpointsPath}/generated-cache-keys.ts`, cacheFileContent, { encoding: \"utf8\" })\n console.info(`Generated ${endpointsPath}/generated-cache-keys.ts`)\n }\n}\n","import fs from \"fs\";\nimport path from \"path\";\nimport { walk } from \"../utils\";\nimport { Command } from \"clipanion\";\n\nexport class GenerateEntityIndexFile extends Command {\n static paths = [[\"generate-entity-index-file\"]]\n\n static usage = Command.Usage({\n category: \"generators\",\n description: \"This script will generate index file for the model library.\",\n examples: [[\"A basic example\", \"npm run stator-cli generate-entity-index-file\"]],\n })\n\n async execute(): Promise {\n const entityIndexLockFilePath = path.join(__dirname, \"entity-index-hash.txt\")\n const indexFilePath = path.join(__dirname, \"../libs/models/src/index.ts\")\n const filePathsByFolder = {}\n\n for await (const entry of walk(path.join(__dirname, \"../libs/models/src/lib\"), [])) {\n const folder = entry.split(\"lib/\")[1].split(\"/\")[0]\n\n if (!filePathsByFolder[folder]) {\n filePathsByFolder[folder] = []\n }\n filePathsByFolder[folder].push(entry)\n }\n\n let indexFileContent = `/**\n * This file was automatically generated by generate-entity-index.js file\n * 
You can disable the automatic generation by removing the prepare section of the workspace.json file under api section\n */\\n\\n`\n\n const sortedFolders = Object.entries(filePathsByFolder)\n .sort()\n .reduce((container, [key, value]) => ({ ...container, [key]: value }), {})\n for (const [folder, filePaths] of Object.entries(sortedFolders)) {\n indexFileContent += `// ${folder}\\n`\n indexFileContent += getExportLinesFromFilePaths(filePaths)\n indexFileContent += \"\\n\"\n }\n\n const entityIndexLockFileExists = fs.existsSync(entityIndexLockFilePath)\n const existingEntityHash = parseInt(\n entityIndexLockFileExists ? await fs.promises.readFile(entityIndexLockFilePath, { encoding: \"utf8\" }) : \"\"\n )\n const currentHash = hashCode(indexFileContent)\n if (existingEntityHash !== currentHash) {\n await fs.promises.writeFile(entityIndexLockFilePath, currentHash.toString(), { encoding: \"utf8\" })\n await fs.promises.writeFile(indexFilePath, indexFileContent, { encoding: \"utf8\" })\n\n console.info(\"Generated index file for shared entity library\")\n }\n }\n}\n\nfunction hashCode(str) {\n let hash = 0\n let i\n let chr\n\n for (i = 0; i < str.length; i++) {\n chr = str.charCodeAt(i)\n hash = (hash << 5) - hash + chr\n hash |= 0 // Convert to 32bit integer\n }\n return hash\n}\n\nfunction getExportLinesFromFilePaths(filePaths) {\n return filePaths\n .sort()\n .map(filePath => {\n const relevantFilePath = filePath.split(\"src/\")[1].replace(\".ts\", \"\")\n\n return `export * from \"./${relevantFilePath}\"\\n`\n })\n .join(\"\")\n}\n","import fs from \"fs\"\nimport path from \"path\"\n\nimport { Command, Option } from \"clipanion\"\nimport { camelCase, kebabCase } from \"lodash\"\n\nimport { walk } from \"../utils\"\n\nexport class RenameProject extends Command {\n static paths = [[\"rename-project\"]]\n organization = Option.String(\"--organization\", { required: true })\n project = Option.String(\"--project\", { required: true })\n\n static usage = 
Command.Usage({\n category: \"getting-started\",\n description: \"This script will rename all occurrences of stator and chocolat-chaud with your own names.\",\n examples: [[\"A basic example\", \"npm run stator-cli rename-project --organization chocolat-chaud-io --project stator\"]],\n })\n\n async execute(): Promise {\n await this.renameProject()\n }\n\n async renameProject() {\n try {\n const organizationRegex = /^[a-zA-Z-\\d_]+$/gim\n if (!organizationRegex.test(this.organization)) {\n console.error(\"The organization name must respect this regex /^[a-zA-Z-\\\\d_]+$/gmi\")\n process.exit(1)\n }\n\n const projectRegex = /^[a-zA-Z-\\d_]+$/gim\n if (!projectRegex.test(this.project)) {\n console.error(\"The project name must respect this regex /^[a-zA-Z-\\\\d_]+$/gmi\")\n process.exit(1)\n }\n const databaseName = this.project.replace(/-/g, \"_\")\n const databaseFiles = [\"docker-compose.yml\", \"seed-data.js\", \"init.sql\", \"test.ts\", \"orm-config.ts\"]\n\n const camelCaseProjectName = camelCase(this.project)\n\n const ignoredFolders = [\"node_modules\", \"dist\", \".git\", \".idea\", \".cache\"]\n for await (const entry of walk(path.join(__dirname, \"../\"), ignoredFolders)) {\n const entryStat = await fs.promises.lstat(entry)\n if (entryStat.isFile()) {\n const fileContent = await fs.promises.readFile(entry, \"utf-8\")\n if (fileContent) {\n const isDatabaseFile = databaseFiles.some(databaseFile => entry.includes(databaseFile))\n const replacedFileContent = fileContent\n .replace(/chocolat-chaud-io/gim, this.organization)\n .replace(/stator/gim, isDatabaseFile ? 
databaseName : camelCaseProjectName)\n await fs.promises.writeFile(entry, replacedFileContent, \"utf-8\")\n }\n }\n }\n\n console.info(`This is now YOUR project provided generously by:\n\n███████ ████████ █████ ████████ ██████ ██████ \n██ ██ ██ ██ ██ ██ ██ ██ ██ \n███████ ██ ███████ ██ ██ ██ ██████ \n ██ ██ ██ ██ ██ ██ ██ ██ ██ \n███████ ██ ██ ██ ██ ██████ ██ ██ \n \n `)\n } catch (error) {\n console.error(error as Error)\n }\n }\n}\n","import fs from \"fs\"\nimport path from \"path\"\n\nexport const walk = async function* (dir: string, ignoredPaths: Array, walkedFolderNames: string[] = []) {\n for await (const directoryEntry of await fs.promises.opendir(dir)) {\n const entryPath = path.join(dir, directoryEntry.name)\n if (directoryEntry.isDirectory() && !ignoredPaths.includes(directoryEntry.name)) {\n walkedFolderNames.push(entryPath)\n yield* walk(entryPath, ignoredPaths, walkedFolderNames)\n } else if (directoryEntry.isFile()) {\n yield entryPath\n }\n }\n}\n","module.exports = require(\"clipanion\");","module.exports = require(\"lodash\");","module.exports = require(\"lodash/camelCase\");","module.exports = require(\"lodash/capitalize\");","module.exports = require(\"lodash/kebabCase\");","module.exports = require(\"tslib\");","module.exports = require(\"fs\");","module.exports = require(\"path\");","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n\n\t// Return the exports of the module\n\treturn 
module.exports;\n}\n\n","import { Builtins, Cli } from \"clipanion\"\n\nimport { EnforceFileFolderNamingConvention } from \"./commands/enforce-file-folder-naming-convention\"\nimport { EnforceValidImportsApi } from \"./commands/enforce-valid-imports-api\"\nimport { GenerateCacheKeyFile } from \"./commands/generate-cache-key-file\"\nimport { GenerateEntityIndexFile } from \"./commands/generate-entity-index-file\"\nimport { RenameProject } from \"./commands/rename-project\"\n\nconst [, , ...args] = process.argv\n\nconst cli = new Cli({\n binaryLabel: `stator-cli`,\n binaryName: `npm run stator-cli`,\n binaryVersion: `1.0.0`,\n})\n\ncli.register(RenameProject)\ncli.register(GenerateCacheKeyFile)\ncli.register(GenerateEntityIndexFile)\ncli.register(EnforceValidImportsApi)\ncli.register(EnforceFileFolderNamingConvention)\ncli.register(Builtins.HelpCommand)\ncli.runExit(args).catch(console.error)\n"],"names":["EnforceFileFolderNamingConvention","Command","async","ignoredPaths","capitalLetterRegex","errorPathPaths","validateEntryName","entry","entryName","basename","replace","length","includes","match","push","folderNames","walk","join","__dirname","folderName","errorMessage","console","error","process","exit","info","paths","usage","Usage","category","description","examples","EnforceValidImportsApi","invalidImportRegex","fileContainingInvalidImports","promises","readFile","encoding","GenerateCacheKeyFile","endpointsPath","importStatements","cacheKeys","cacheFileContent","pathName","lstatSync","isFile","cacheKey","endpointsSelectorRegex","endpointSelectorRegex","endpointNames","readFileSync","matchAll","map","entries","flat","endpointName","tagPropertyKey","importStatement","writeFileSync","GenerateEntityIndexFile","entityIndexLockFilePath","indexFilePath","filePathsByFolder","folder","split","indexFileContent","sortedFolders","Object","sort","reduce","container","key","value","filePaths","getExportLinesFromFilePaths","entityIndexLockFileExists","existsSync","existingEntit
yHash","parseInt","currentHash","str","i","chr","hash","charCodeAt","hashCode","writeFile","toString","filePath","RenameProject","organization","Option","String","required","project","this","renameProject","test","databaseName","databaseFiles","camelCaseProjectName","camelCase","ignoredFolders","lstat","fileContent","isDatabaseFile","some","databaseFile","replacedFileContent","dir","walkedFolderNames","directoryEntry","opendir","entryPath","name","isDirectory","module","exports","require","__webpack_module_cache__","__webpack_require__","moduleId","cachedModule","undefined","__webpack_modules__","args","argv","cli","Cli","binaryLabel","binaryName","binaryVersion","register","Builtins","HelpCommand","runExit","catch"],"sourceRoot":""}
--------------------------------------------------------------------------------