├── tests ├── fixtures │ ├── empty-fixtures.ts │ ├── simple.json │ ├── array-entry.json │ ├── ast-fixture.ts │ └── generated-fixture.ts ├── postprocesser.test.ts ├── writter.test.ts ├── reader.test.ts ├── utils.test.ts ├── checker.test.ts ├── preprocesser.test.ts └── ast.test.ts ├── jest.setup.ts ├── src ├── sample │ ├── array-entry.json │ ├── sample.json │ ├── demo.ts │ └── demo.json ├── postprocessor.ts ├── writter.ts ├── reader.ts ├── checker.ts ├── index.ts ├── preprocessor.ts ├── generator.ts ├── parser.ts ├── utils.ts └── ast.ts ├── tsconfig.build.json ├── .npmignore ├── .commitlintrc.js ├── tsconfig.json ├── .release-it.json ├── CHANGELOG.md ├── LICENSE ├── .cz-config.js ├── .gitignore ├── package.json ├── jest.config.ts └── README.md /tests/fixtures/empty-fixtures.ts: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /jest.setup.ts: -------------------------------------------------------------------------------- 1 | jest.setTimeout(100000); 2 | -------------------------------------------------------------------------------- /src/sample/array-entry.json: -------------------------------------------------------------------------------- 1 | [1, 2, 3, 4, { "a": 1 }] 2 | -------------------------------------------------------------------------------- /tests/fixtures/simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "stringField": "linbudu", 3 | "numberField": 599, 4 | "booleanField": true 5 | } 6 | -------------------------------------------------------------------------------- /tests/fixtures/array-entry.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "stringField": "linbudu", 4 | "numberField": 599, 5 | "booleanField": true 6 | } 7 | ] 8 | -------------------------------------------------------------------------------- /tsconfig.build.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "include": ["src"], 4 | "exclude": ["tests", "node_modules", "src/sample"] 5 | } 6 | -------------------------------------------------------------------------------- /tests/fixtures/ast-fixture.ts: -------------------------------------------------------------------------------- 1 | import consola from "consola"; 2 | import { Scope, SyntaxKind } from "ts-morph"; 3 | 4 | class Foo {} 5 | 6 | class Bar {} 7 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | src 2 | .commitlintrc.js 3 | .cz-config.js 4 | .release-it.json 5 | .TODO 6 | yarn-error.log 7 | tests 8 | jest.config.ts 9 | jest.setup.ts 10 | coverage -------------------------------------------------------------------------------- /src/sample/sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "booleanField": true, 3 | "numberField": 200, 4 | "stringField": "success", 5 | "primitiveArrayField": [1, 2, 3, 4, 5], 6 | "mixedField": [ 7 | 1, 8 | 2, 9 | { 10 | "a": "1111111" 11 | } 12 | ], 13 | "emptyArrayField": [], 14 | "nestedField": { 15 | "booleanField": true, 16 | "numberField": 200, 17 | "stringField": "success", 18 | "primitiveArrayField": [1, 2, 3, 4, 5], 19 | "mixedFieldrs": [1, 2] 20 | } 21 | } 22 | 
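A minimal usage sketch, not a file of the repository: the JSON samples above are the kind of input consumed by the transformer that src/index.ts default-exports, and the call below is modelled on src/sample/demo.ts shown later in this dump. The package import specifier comes from package.json; the fixture and output paths are illustrative assumptions.

import path from "path";
import transformer from "json-type-graphql"; // package name from package.json; inside the repo, src/sample/demo.ts imports from ".." instead

(async () => {
  // Pipeline order in src/index.ts: reader -> preprocessor -> parser -> generator -> postprocessor -> checker -> writter
  await transformer({
    reader: { path: path.join(__dirname, "tests/fixtures/simple.json") }, // assumed location of the fixture shown above
    parser: {
      forceNonNullable: false,
      forceReturnType: false,
      forceNonNullableListItem: false,
    },
    generator: { entryClassName: "Root", sort: false },
    checker: { disable: false, keep: false },
    writter: { outputPath: path.join(__dirname, "generated.ts") },
  });
})();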
-------------------------------------------------------------------------------- /.commitlintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | extends: ["cz"], 3 | rules: { 4 | "body-leading-blank": [1, "always"], 5 | "footer-leading-blank": [1, "always"], 6 | "header-max-length": [1, "always", 71], 7 | "scope-case": [1, "never", "lower-case"], 8 | "subject-case": [ 9 | 1, 10 | "never", 11 | ["sentence-case", "start-case", "pascal-case", "upper-case"], 12 | ], 13 | "subject-empty": [1, "never"], 14 | "subject-full-stop": [1, "never", "."], 15 | "type-case": [1, "always", "lower-case"], 16 | "type-empty": [1, "never"], 17 | "type-enum": [1, "always", []], 18 | }, 19 | }; 20 | -------------------------------------------------------------------------------- /src/postprocessor.ts: -------------------------------------------------------------------------------- 1 | import { SourceFile } from "ts-morph"; 2 | import omit from "lodash/omit"; 3 | 4 | import type { PostprocessorOptions } from "./utils"; 5 | 6 | /** 7 | * Post-process source file after it's processed by generator. 8 | * @param source 9 | * @param options 10 | * @returns 11 | */ 12 | export function postprocessor( 13 | source: SourceFile, 14 | options: PostprocessorOptions 15 | ): void { 16 | if ( 17 | options.customPostprocessor && 18 | typeof options.customPostprocessor === "function" 19 | ) { 20 | options.customPostprocessor(source, omit(options, ["customPostprocessor"])); 21 | } 22 | 23 | // TODO: remove unused decorators 24 | // TODO: more... 25 | } 26 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "include": ["src", "tests"], 3 | "exclude": ["node_modules"], 4 | "compilerOptions": { 5 | "module": "CommonJS", 6 | "target": "ES2018", 7 | "lib": ["esnext"], 8 | "declaration": true, 9 | "outDir": "dist", 10 | "sourceMap": true, 11 | "inlineSourceMap": false, 12 | "strict": true, 13 | "noImplicitReturns": true, 14 | "noUnusedLocals": false, 15 | "noUnusedParameters": false, 16 | "moduleResolution": "node", 17 | "esModuleInterop": true, 18 | "skipLibCheck": true, 19 | "forceConsistentCasingInFileNames": true, 20 | "experimentalDecorators": true, 21 | "emitDecoratorMetadata": true, 22 | "noEmit": false 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/writter.ts: -------------------------------------------------------------------------------- 1 | import prettier from "prettier"; 2 | import fs from "fs-extra"; 3 | import type { WriterOptions } from "./utils"; 4 | 5 | /** 6 | * Format generated file and write 7 | * @param param 8 | */ 9 | export function writter({ 10 | outputPath, 11 | override, 12 | format, 13 | formatOptions, 14 | }: WriterOptions) { 15 | if (!outputPath) throw new Error("writer.outputPath is required!"); 16 | 17 | const raw = fs.readFileSync(outputPath, "utf-8"); 18 | 19 | const formatted = format 20 | ? 
prettier.format(raw, { 21 | parser: "typescript", 22 | tabWidth: 2, 23 | ...formatOptions, 24 | }) 25 | : raw; 26 | 27 | fs.writeFileSync(outputPath, formatted); 28 | } 29 | -------------------------------------------------------------------------------- /.release-it.json: -------------------------------------------------------------------------------- 1 | { 2 | "github": { 3 | "release": true, 4 | "tokenRef": "GITHUB_TOKEN" 5 | }, 6 | "npm": { 7 | "skipChecks": true 8 | }, 9 | "publishConfig": { 10 | "access": "public" 11 | }, 12 | "git": { 13 | "commitMessage": "release: v${version}", 14 | "changelog": "auto-changelog --stdout --commit-limit false -u --template https://raw.githubusercontent.com/release-it/release-it/master/templates/changelog-compact.hbs", 15 | "tag": true, 16 | "tagAnnotation": "Release ${version}", 17 | "push": true, 18 | "pushArgs": ["--follow-tags"], 19 | "addUntrackedFiles": true, 20 | "requireCleanWorkingDir": false 21 | }, 22 | "hooks": { 23 | "after:bump": "auto-changelog -p" 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/sample/demo.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import fs from "fs-extra"; 3 | import transformer from ".."; 4 | import { reader, parser, generator, checker, writter } from ".."; 5 | 6 | const outputPath = path.join(__dirname, "./generated.ts"); 7 | 8 | fs.existsSync(outputPath) && fs.rmSync(outputPath); 9 | 10 | (async () => { 11 | await transformer({ 12 | reader: { path: path.join(__dirname, "./demo.json") }, 13 | // reader: { path: path.join(__dirname, "./sample.json") }, 14 | // reader: { url: "https://dog.ceo/api/breeds/image/random" }, 15 | parser: { 16 | forceNonNullable: false, 17 | forceReturnType: false, 18 | forceNonNullableListItem: false, 19 | }, 20 | generator: { entryClassName: "Root", sort: false }, 21 | checker: { 22 | disable: false, 23 | keep: true, 24 | }, 25 | writter: { 26 | outputPath, 27 | }, 28 | }); 29 | })(); 30 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ### Changelog 2 | 3 | All notable changes to this project will be documented in this file. Dates are displayed in UTC. 4 | 5 | Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog). 
6 | 7 | #### [0.6.1](https://github.com/linbudu599/JSON2TypeGraphQLClass/compare/0.5.4...0.6.1) 8 | 9 | - :bug: fix: fix release script [`ac25045`](https://github.com/linbudu599/JSON2TypeGraphQLClass/commit/ac25045264565b2f2a1cde60c46af25f13fd3bb3) 10 | 11 | #### 0.5.4 12 | 13 | > 16 November 2021 14 | 15 | - :construction: chore: update release workflow [`5ccfa43`](https://github.com/linbudu599/JSON2TypeGraphQLClass/commit/5ccfa43994a949dd89c1582f191f438312a0240d) 16 | - refactor [`584830a`](https://github.com/linbudu599/JSON2TypeGraphQLClass/commit/584830a263349b6db7653470e9439a70baae150d) 17 | - support array entry [`c90a5a5`](https://github.com/linbudu599/JSON2TypeGraphQLClass/commit/c90a5a57d3fa4fa2c9e8f68257c9d3aa8e8ea062) 18 | -------------------------------------------------------------------------------- /tests/postprocesser.test.ts: -------------------------------------------------------------------------------- 1 | import { Project } from "ts-morph"; 2 | import tmp from "tmp"; 3 | import fs from "fs-extra"; 4 | import { postprocessor } from "../src/postprocessor"; 5 | 6 | describe("should apply postprocess", () => { 7 | it("should use custom postprocessor", () => { 8 | const fn = jest.fn().mockImplementation((r) => {}); 9 | const tmpFile = tmp.fileSync().name; 10 | 11 | fs.writeFileSync(tmpFile, "const foo = 'bar'"); 12 | 13 | const source = new Project().addSourceFileAtPath(tmpFile); 14 | 15 | postprocessor(source, { 16 | customPostprocessor: fn, 17 | }); 18 | 19 | expect(fn).toHaveBeenCalledTimes(1); 20 | expect(fn).toHaveBeenCalledWith(source, {}); 21 | }); 22 | 23 | it("should skip when no custom postprocessor specified", () => { 24 | const tmpFile = tmp.fileSync().name; 25 | const source = new Project().addSourceFileAtPath(tmpFile); 26 | 27 | fs.writeFileSync(tmpFile, "const foo = 'bar'"); 28 | 29 | postprocessor(source, {}); 30 | 31 | // TODO: how do we know it's skipped? 32 | }); 33 | }); 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Linbudu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /src/sample/demo.json: -------------------------------------------------------------------------------- 1 | { 2 | "booleanField": true, 3 | "numberField": 200, 4 | "stringField": "success", 5 | "primitiveArrayField": [1, 2, 3, 4, 5], 6 | "mixedField": [ 7 | 1, 8 | 2, 9 | { 10 | "a": "1111111" 11 | } 12 | ], 13 | "emptyArrayField": [], 14 | "nestedField": { 15 | "booleanField": true, 16 | "numberField": 200, 17 | "stringField": "success", 18 | "primitiveArrayField": [1, 2, 3, 4, 5], 19 | "mixedFields": [ 20 | 1, 21 | 2, 22 | { 23 | "b": "1111111", 24 | "c": { 25 | "mixedField1": { 26 | "a111": "111" 27 | } 28 | } 29 | } 30 | ] 31 | }, 32 | "f": [ 33 | { 34 | "fa": 1, 35 | "fb": 2 36 | }, 37 | { 38 | "fa": 1, 39 | "fc": 2, 40 | "fd": [1, 2, 3, 4, 5], 41 | "ff": [] 42 | }, 43 | { 44 | "fa": 1, 45 | "fe": { 46 | "fea": 1, 47 | "feb": [1, 2, 3, 4, 5], 48 | "fec": [ 49 | { 50 | "feca": 1, 51 | "fecb": false, 52 | "fecc": ["name", "age", "prop"] 53 | } 54 | ] 55 | } 56 | } 57 | ] 58 | } 59 | -------------------------------------------------------------------------------- /src/reader.ts: -------------------------------------------------------------------------------- 1 | import jsonfile from "jsonfile"; 2 | import path from "path"; 3 | import got, { Options } from "got"; 4 | 5 | import type { 6 | MaybeArray, 7 | ReaderOptions, 8 | SourceArray, 9 | SourceObject, 10 | } from "./utils"; 11 | 12 | /** 13 | * Read content from certain resources 14 | * @param options 15 | * @returns 16 | */ 17 | export async function reader(options?: ReaderOptions) { 18 | // Use Invariant 19 | if (!options) { 20 | throw new Error("You must provide reader options!"); 21 | } 22 | 23 | if (options.path) return readFromFile(options.path); 24 | 25 | if (options.url) return await readFromRequest(options.url, options.options); 26 | 27 | if (options.raw) return options.raw; 28 | 29 | throw new Error( 30 | "You must provide oneof path/url/raw to get origin JSON content!" 
31 | ); 32 | } 33 | 34 | export function readFromFile(filePath: string) { 35 | return jsonfile.readFileSync(filePath, { throws: true }); 36 | } 37 | 38 | export async function readFromRequest(url: string, options?: Options) { 39 | const res = await got(url, { 40 | responseType: "json", 41 | method: "GET", 42 | ...options, 43 | }); 44 | return (res as { body: MaybeArray | SourceArray }).body; 45 | } 46 | -------------------------------------------------------------------------------- /.cz-config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | types: [ 3 | { value: ":sparkles: feat", name: "feat: " }, 4 | { value: ":pencil: docs", name: "docs: " }, 5 | { value: ":construction: chore", name: "chore: " }, 6 | { value: ":bug: fix", name: "fix: " }, 7 | { value: ":wrench: chore", name: "chore: " }, 8 | { value: ":ambulance: fix", name: "fix: " }, 9 | { value: ":zap: perf", name: "perf: " }, 10 | { value: ":construction_worker: ci", name: "ci: " }, 11 | { value: ":green_heart: ci", name: "ci: " }, 12 | { value: ":white_check_mark: test", name: "test: " }, 13 | { value: ":hammer: refactor", name: "refactor: " }, 14 | { value: ":lock: fix", name: "fix: " }, 15 | { value: ":rocket: deploy", name: "deploy: " }, 16 | { value: ":art: style", name: "style: " }, 17 | { value: ":heavy_plus_sign: add", name: "add: " }, 18 | { value: ":fire: del", name: "del: " }, 19 | { value: ":pencil2: docs", name: "docs: " }, 20 | { 21 | value: ":chart_with_upwards_trend: chore:", 22 | name: "chore: ", 23 | }, 24 | { value: ":bookmark: release", name: "release: " }, 25 | ], 26 | scopes: [], 27 | messages: { 28 | type: "TYPE:\n", 29 | scope: "SCOPE:\n", 30 | subject: "DESC:\n", 31 | body: 'DETAILED_DESC. Use "|" for line breaks:\n', 32 | footer: "ISSUE_CLOSING. 
E.g.: #31, #34:\n", 33 | confirmCommit: "CONFIRM?", 34 | }, 35 | allowCustomScopes: true, 36 | allowBreakingChanges: ["feat", "fix"], 37 | }; 38 | -------------------------------------------------------------------------------- /src/checker.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import fs from "fs-extra"; 3 | import { Project } from "ts-morph"; 4 | import execa from "execa"; 5 | import { createTmpResolverContent } from "./ast"; 6 | 7 | import type { CheckerOptions } from "./utils"; 8 | 9 | /** 10 | * Check can generated schema be used by buildSchemaSync 11 | * @param outputPath generated path 12 | * @param options checker options 13 | * @returns 14 | */ 15 | export async function checker(outputPath: string, options: CheckerOptions) { 16 | if (options.disable) return; 17 | 18 | const outputDir = path.dirname(outputPath); 19 | const tmpFilePath = path.resolve(outputDir, "generated_checker.ts"); 20 | 21 | fs.ensureFileSync(tmpFilePath); 22 | 23 | fs.writeFileSync(tmpFilePath, fs.readFileSync(outputPath, "utf-8")); 24 | 25 | const project = new Project(); 26 | 27 | const checkerOnlySource = project.addSourceFileAtPath(tmpFilePath); 28 | 29 | try { 30 | createTmpResolverContent(checkerOnlySource, options, "Root"); 31 | 32 | await execa( 33 | `ts-node ${tmpFilePath}`, 34 | [ 35 | "--compiler-options", 36 | JSON.stringify({ 37 | experimentalDecorators: true, 38 | emitDecoratorMetadata: true, 39 | declaration: false, 40 | ...options.executeOptions, 41 | }), 42 | ], 43 | { 44 | shell: true, 45 | stdio: "inherit", 46 | ...options.execaOptions, 47 | } 48 | ); 49 | } catch (error) { 50 | throw error; 51 | } finally { 52 | !options.keep && fs.existsSync(tmpFilePath) && fs.rmSync(tmpFilePath); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /tests/writter.test.ts: -------------------------------------------------------------------------------- 1 | import { writter } from "../src/writter"; 2 | import fs from "fs-extra"; 3 | import prettier from "prettier"; 4 | import tmp from "tmp"; 5 | 6 | let tmpFile: string; 7 | 8 | beforeEach(() => { 9 | tmpFile = tmp.fileSync().name; 10 | fs.writeFileSync(tmpFile, "const a:string = 'linbudu'"); 11 | }); 12 | 13 | describe("should handle write and format", () => { 14 | it("should throw on no output path specified", () => { 15 | // @ts-ignore 16 | expect(() => writter({})).toThrow("writer.outputPath is required!"); 17 | // @ts-ignore 18 | expect(() => writter({ outputPath: null })).toThrow( 19 | "writer.outputPath is required!" 
20 | ); 21 | }); 22 | 23 | it("should write", () => { 24 | const rfs = jest 25 | .spyOn(fs, "readFileSync") 26 | .mockReturnValue("const a:string = 'linbudu'"); 27 | const wfs = jest.spyOn(fs, "writeFileSync").mockReturnValue(); 28 | 29 | writter({ outputPath: tmpFile, formatOptions: { singleQuote: true } }); 30 | 31 | expect(rfs).toBeCalledTimes(1); 32 | expect(rfs).toBeCalledWith(tmpFile, "utf-8"); 33 | 34 | expect(wfs).toBeCalledTimes(1); 35 | expect(wfs).toBeCalledWith(tmpFile, "const a:string = 'linbudu'"); 36 | }); 37 | 38 | it("should disable format", () => { 39 | const formatter = jest.spyOn(prettier, "format"); 40 | writter({ outputPath: tmpFile, format: false }); 41 | 42 | expect(formatter).not.toBeCalled(); 43 | 44 | writter({ outputPath: tmpFile, format: true }); 45 | 46 | expect(formatter).toBeCalledTimes(1); 47 | 48 | expect(formatter).toBeCalledWith("const a:string = 'linbudu'", { 49 | parser: "typescript", 50 | tabWidth: 2, 51 | }); 52 | }); 53 | }); 54 | -------------------------------------------------------------------------------- /tests/reader.test.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import fs from "fs-extra"; 3 | import { reader } from "../src/reader"; 4 | 5 | describe("should read content from various sources", () => { 6 | it("should throw when no options provided", async () => { 7 | await expect(reader()).rejects.toThrow("You must provide reader options!"); 8 | }); 9 | 10 | it("should throw when incorrect options provided", async () => { 11 | // @ts-ignore 12 | await expect(reader({ x: "xxx" })).rejects.toThrow( 13 | "You must provide oneof path/url/raw to get origin JSON content!" 14 | ); 15 | }); 16 | 17 | it("should read from JSON file", async () => { 18 | const raw = await reader({ 19 | path: path.resolve(__dirname, "./fixtures/simple.json"), 20 | }); 21 | 22 | expect(raw).not.toBeNull(); 23 | expect(typeof raw).toBe("object"); 24 | expect(Object.keys(raw).length).toBeGreaterThan(0); 25 | }); 26 | 27 | it("should read from JSON file(array-entry)", async () => { 28 | const raw = await reader({ 29 | path: path.resolve(__dirname, "./fixtures/array-entry.json"), 30 | }); 31 | 32 | expect(raw).not.toBeNull(); 33 | expect(Array.isArray(raw)).toBe(true); 34 | expect(raw.length).toBeGreaterThan(0); 35 | }); 36 | 37 | it("should read from raw object", async () => { 38 | const data = { 39 | stringField: "linbudu", 40 | numberField: 599, 41 | booleanField: true, 42 | }; 43 | 44 | expect(await reader({ raw: data })).toMatchObject(data); 45 | }); 46 | 47 | it("should read from request", async () => { 48 | const fetched = await reader({ 49 | url: "https://dog.ceo/api/breeds/image/random", 50 | }); 51 | 52 | expect(fetched).toBeDefined(); 53 | expect(typeof fetched).toBe("object"); 54 | expect(typeof fetched.message).toBe("string"); 55 | expect(typeof fetched.status).toBe("string"); 56 | }); 57 | }); 58 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs-extra"; 2 | import { Project } from "ts-morph"; 3 | 4 | import { reader } from "./reader"; 5 | import { preprocessor } from "./preprocessor"; 6 | import { parser } from "./parser"; 7 | import { generator } from "./generator"; 8 | import { postprocessor } from "./postprocessor"; 9 | import { checker } from "./checker"; 10 | import { writter } from "./writter"; 11 | 12 | import { normalizeOptions } 
from "./utils"; 13 | import type { Options } from "./utils"; 14 | 15 | /** 16 | * Generate TypeGraphQL class declaration from JSON object 17 | * @param options options for each pipeline step 18 | * (reader / preprocessor / parser / generator / postprocessor / checker / writter) 19 | * @returns 20 | */ 21 | export default async function handler(options: Options): Promise<void> { 22 | const content = await reader(options.reader); 23 | 24 | const { 25 | normalizedPreprocessorOptions, 26 | normalizedParserOptions, 27 | normalizedGeneratorOptions, 28 | normalizedPostprocessorOptions, 29 | normalizedCheckerOptions, 30 | normalizedWritterOptions, 31 | } = normalizeOptions(options); 32 | 33 | const originInput = content; 34 | 35 | const preprocessed = preprocessor(originInput, normalizedPreprocessorOptions); 36 | 37 | const parsedInfo = parser(preprocessed, normalizedParserOptions); 38 | 39 | fs.ensureFileSync(normalizedWritterOptions.outputPath); 40 | 41 | const source = new Project().addSourceFileAtPath( 42 | normalizedWritterOptions.outputPath 43 | ); 44 | 45 | generator(source, parsedInfo, normalizedGeneratorOptions); 46 | 47 | postprocessor(source, normalizedPostprocessorOptions); 48 | 49 | await checker(normalizedWritterOptions.outputPath, normalizedCheckerOptions); 50 | writter(normalizedWritterOptions); 51 | } 52 | 53 | export * from "./reader"; 54 | export * from "./preprocessor"; 55 | export * from "./parser"; 56 | export * from "./generator"; 57 | export * from "./postprocessor"; 58 | export * from "./checker"; 59 | export * from "./writter"; 60 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | generated.ts 10 | testing.ts 11 | generated_checker.ts 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # nyc test coverage 30 | .nyc_output 31 | 32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # Bower dependency directory (https://bower.io/) 36 | bower_components 37 | 38 | # node-waf configuration 39 | .lock-wscript 40 | 41 | # Compiled binary addons (https://nodejs.org/api/addons.html) 42 | build/Release 43 | 44 | # Dependency directories 45 | node_modules/ 46 | jspm_packages/ 47 | 48 | # TypeScript v1 declaration files 49 | typings/ 50 | 51 | # TypeScript cache 52 | *.tsbuildinfo 53 | 54 | # Optional npm cache directory 55 | .npm 56 | 57 | # Optional eslint cache 58 | .eslintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variables file 76 | .env 77 | .env.test 78 | 79 | # parcel-bundler cache (https://parceljs.org/) 80 | .cache 81 | 82 | # Next.js build output 83 | .next 84 | 85 | # Nuxt.js build / generate output 86 | .nuxt 87 | dist 88 | 89 | # Gatsby files 90 | .cache/ 91 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 92 | # 
https://nextjs.org/blog/next-9-1#public-directory-support 93 | # public 94 | 95 | # vuepress build output 96 | .vuepress/dist 97 | 98 | # Serverless directories 99 | .serverless/ 100 | 101 | # FuseBox cache 102 | .fusebox/ 103 | 104 | # DynamoDB Local files 105 | .dynamodb/ 106 | 107 | # TernJS port file 108 | .tern-port 109 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "json-type-graphql", 3 | "version": "0.6.1", 4 | "description": "Generate TypeGraphQL Class From JSON Object", 5 | "main": "dist/index.js", 6 | "repository": "https://github.com/linbudu599/JSON2TypeGraphQLClass.git", 7 | "license": "MIT", 8 | "scripts": { 9 | "release": "yarn build && release-it", 10 | "release:minor": "yarn build && release-it --minor", 11 | "release:major": "yarn build && release-it --major", 12 | "release:dry": "release-it --dry-run", 13 | "demo": "tsnd --respawn --rs --transpile-only ./src/sample/demo.ts", 14 | "build": "tsc -p tsconfig.build.json", 15 | "build:watch": "tsc --watch -p tsconfig.build.json", 16 | "build:check": "tsc --noEmit -p tsconfig.build.json", 17 | "commit": "git add -A & git-cz", 18 | "format": "prettier --check .", 19 | "format:fix": "prettier --write .", 20 | "test": "jest", 21 | "test:watch": "jest --color --watch --verbose --passWithNoTests", 22 | "test:affected": "jest --lastCommit", 23 | "test:list": "jest --listTests" 24 | }, 25 | "config": { 26 | "commitizen": { 27 | "path": "node_modules/cz-customizable" 28 | } 29 | }, 30 | "dependencies": { 31 | "capital-case": "^1.0.4", 32 | "execa": "^5.1.1", 33 | "fs-extra": "^10.0.0", 34 | "got": "^11.8.2", 35 | "graphql": "^15.5.3", 36 | "jsonfile": "^6.1.0", 37 | "lodash": "^4.17.21", 38 | "prettier": "^2.3.2", 39 | "reflect-metadata": "^0.1.13", 40 | "tmp": "^0.2.1", 41 | "ts-morph": "^12.0.0" 42 | }, 43 | "devDependencies": { 44 | "@commitlint/cli": "^13.1.0", 45 | "@commitlint/config-conventional": "^13.1.0", 46 | "@release-it/conventional-changelog": "^3.3.0", 47 | "@types/fs-extra": "^9.0.12", 48 | "@types/jest": "^27.0.1", 49 | "@types/jsonfile": "^6.0.1", 50 | "@types/lodash": "^4.14.172", 51 | "@types/node": "^16.7.10", 52 | "@types/prettier": "^2.3.2", 53 | "@types/tmp": "^0.2.1", 54 | "auto-changelog": "^2.3.0", 55 | "commitizen": "^4.2.4", 56 | "commitlint-config-cz": "^0.13.2", 57 | "consola": "^2.15.3", 58 | "cz-conventional-changelog": "^3.3.0", 59 | "cz-customizable": "^6.3.0", 60 | "husky": "^7.0.2", 61 | "is-ci": "^3.0.0", 62 | "jest": "^27.1.0", 63 | "pinst": "^2.1.6", 64 | "release-it": "^14.11.5", 65 | "ts-jest": "^27.0.5", 66 | "ts-node": "^10.2.1", 67 | "ts-node-dev": "^1.1.8", 68 | "type-graphql": "^1.1.1", 69 | "typescript": "^4.4.2" 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /tests/fixtures/generated-fixture.ts: -------------------------------------------------------------------------------- 1 | import { ObjectType, Field, Int, ID } from "type-graphql"; 2 | 3 | @ObjectType() 4 | export class MixedField { 5 | @Field() 6 | a!: string; 7 | } 8 | 9 | @ObjectType() 10 | export class EmptyArrayField {} 11 | 12 | @ObjectType() 13 | export class MixedField1 { 14 | @Field({ nullable: true }) 15 | a111?: string; 16 | } 17 | 18 | @ObjectType() 19 | export class C { 20 | @Field((type) => MixedField1, { nullable: true }) 21 | mixedField1?: MixedField1; 22 | } 23 | 24 | @ObjectType() 25 | export class MixedFieldrs { 
26 | @Field() 27 | b!: string; 28 | 29 | @Field((type) => C!) 30 | c!: C; 31 | } 32 | 33 | @ObjectType() 34 | export class NestedField { 35 | @Field({ nullable: true }) 36 | booleanField?: boolean; 37 | 38 | @Field((type) => Int, { nullable: true }) 39 | numberField?: number; 40 | 41 | @Field({ nullable: true }) 42 | stringField?: string; 43 | 44 | @Field((type) => [Int], { nullable: true }) 45 | primitiveArrayField?: number[]; 46 | 47 | @Field((type) => [MixedFieldrs], { nullable: true }) 48 | mixedFieldrs?: MixedFieldrs[]; 49 | } 50 | 51 | @ObjectType() 52 | export class Ff {} 53 | 54 | @ObjectType() 55 | export class Fec { 56 | @Field((type) => Int!) 57 | feca!: number; 58 | 59 | @Field() 60 | fecb!: boolean; 61 | 62 | @Field((type) => [String]!) 63 | fecc!: string[]; 64 | } 65 | 66 | @ObjectType() 67 | export class Fe { 68 | @Field((type) => Int, { nullable: true }) 69 | fea?: number; 70 | 71 | @Field((type) => [Int], { nullable: true }) 72 | feb?: number[]; 73 | 74 | @Field((type) => [Fec], { nullable: true }) 75 | fec?: Fec[]; 76 | } 77 | 78 | @ObjectType() 79 | export class F { 80 | @Field((type) => Int!) 81 | fa!: number; 82 | 83 | @Field((type) => Int, { nullable: true }) 84 | fb?: number; 85 | 86 | @Field((type) => Int, { nullable: true }) 87 | fc?: number; 88 | 89 | @Field((type) => [Int], { nullable: true }) 90 | fd?: number[]; 91 | 92 | @Field((type) => [Ff], { nullable: true }) 93 | ff?: Ff[]; 94 | 95 | @Field((type) => Fe, { nullable: true }) 96 | fe?: Fe; 97 | } 98 | 99 | @ObjectType() 100 | export class Root { 101 | @Field({ nullable: true }) 102 | booleanField?: boolean; 103 | 104 | @Field((type) => Int, { nullable: true }) 105 | numberField?: number; 106 | 107 | @Field({ nullable: true }) 108 | stringField?: string; 109 | 110 | @Field((type) => [Int], { nullable: true }) 111 | primitiveArrayField?: number[]; 112 | 113 | @Field((type) => [MixedField], { nullable: true }) 114 | mixedField?: MixedField[]; 115 | 116 | @Field((type) => [EmptyArrayField], { nullable: true }) 117 | emptyArrayField?: EmptyArrayField[]; 118 | 119 | @Field((type) => NestedField, { nullable: true }) 120 | nestedField?: NestedField; 121 | 122 | @Field((type) => [F], { nullable: true }) 123 | f?: F[]; 124 | } 125 | -------------------------------------------------------------------------------- /src/preprocessor.ts: -------------------------------------------------------------------------------- 1 | import { MaybeArray, strictTypeChecker, ValidFieldType } from "./utils"; 2 | import omit from "lodash/omit"; 3 | 4 | import type { SourceObject, SourceArray, PreprocessorOptions } from "./utils"; 5 | 6 | /** 7 | * Pre-process raw content before it's passed to parser. 8 | * Nested array will be removed for now(not support yet). 9 | * @param raw 10 | * @param options 11 | * @returns 12 | */ 13 | export function preprocessor( 14 | raw: MaybeArray | SourceArray, 15 | options: PreprocessorOptions 16 | ): MaybeArray | SourceArray { 17 | if ( 18 | options.customPreprocessor && 19 | typeof options.customPreprocessor === "function" 20 | ) { 21 | return options.customPreprocessor( 22 | raw, 23 | omit(options, ["customPreprocessor"]) 24 | ); 25 | } 26 | 27 | if (Array.isArray(raw)) { 28 | return arrayPreprocessor(raw, options); 29 | } 30 | 31 | for (const [k, v] of Object.entries(raw)) { 32 | // extract as `nestedArray` 33 | if (Array.isArray(v)) { 34 | v.length && Array.isArray(v[0]) 35 | ? 
// delete nested array directly at first 36 | delete raw[k] 37 | : (raw[k] = preprocessor(v, options)); 38 | } 39 | 40 | if (strictTypeChecker(v) === ValidFieldType.Object) { 41 | preprocessor(v, options); 42 | } 43 | } 44 | 45 | return raw; 46 | } 47 | 48 | /** 49 | * Ensure only object or primitive type exist in an array. 50 | * @param raw 51 | * @param param1 52 | * @returns 53 | */ 54 | export function arrayPreprocessor( 55 | raw: SourceArray, 56 | { preserveObjectOnlyInArray }: PreprocessorOptions 57 | ) { 58 | if (!raw.length) return raw; 59 | 60 | const { primitives, objects, shouldApplyProcess } = shouldProcess(raw); 61 | 62 | return shouldApplyProcess 63 | ? preserveObjectOnlyInArray 64 | ? objects 65 | : primitives 66 | : raw; 67 | } 68 | 69 | type ShouldProcessResult = { 70 | primitives: SourceArray; 71 | objects: SourceObject[]; 72 | shouldApplyProcess: boolean; 73 | }; 74 | 75 | /** 76 | * Determine should process array and split primitive & object members 77 | * @param arr 78 | * @returns 79 | */ 80 | export function shouldProcess( 81 | arr: SourceArray | SourceObject[] 82 | ): ShouldProcessResult { 83 | const primitives = preservePrimitiveTypeInArrayOnly(arr); 84 | 85 | const objects = preserveObjectTypeInArrayOnly(arr); 86 | 87 | return { 88 | primitives, 89 | objects, 90 | shouldApplyProcess: primitives.length !== 0 && objects.length !== 0, 91 | }; 92 | } 93 | 94 | /** 95 | * Preserve only object type in an array, primitive type will be removed. 96 | * @param arr 97 | * @returns 98 | */ 99 | export function preserveObjectTypeInArrayOnly( 100 | arr: SourceArray 101 | ): SourceObject[] { 102 | return arr.filter( 103 | (val) => strictTypeChecker(val) === ValidFieldType.Object 104 | ) as SourceObject[]; 105 | } 106 | 107 | /** 108 | * Preserve only primitive type in an array, object type will be removed. 
109 | * @param arr 110 | * @returns 111 | */ 112 | export function preservePrimitiveTypeInArrayOnly( 113 | arr: SourceArray 114 | ): SourceArray { 115 | return arr.filter((val) => 116 | [ 117 | ValidFieldType.Number, 118 | ValidFieldType.Boolean, 119 | ValidFieldType.String, 120 | ].includes(strictTypeChecker(val)) 121 | ); 122 | } 123 | -------------------------------------------------------------------------------- /tests/utils.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | capitalCase, 3 | ensureArray, 4 | reverseObjectKeys, 5 | strictTypeChecker, 6 | normalizeClassFix, 7 | normalizeTypeFix, 8 | ValidFieldType, 9 | } from "../src/utils"; 10 | 11 | describe("should ensure utils!", () => { 12 | it("should transform to capitalCase", () => { 13 | expect(capitalCase("nested")).toBe("Nested"); 14 | expect(capitalCase("nestedType")).toBe("NestedType"); 15 | }); 16 | 17 | it("should ensure array type", () => { 18 | expect(ensureArray(1)).toEqual([1]); 19 | expect(ensureArray([1])).toEqual([1]); 20 | }); 21 | 22 | it("should reverse object keys", () => { 23 | expect( 24 | reverseObjectKeys({ 25 | a: { 26 | info: {}, 27 | parent: null, 28 | children: [], 29 | generated: true, 30 | }, 31 | b: { 32 | info: {}, 33 | parent: null, 34 | children: [], 35 | generated: true, 36 | }, 37 | c: { 38 | info: {}, 39 | parent: null, 40 | children: [], 41 | generated: true, 42 | }, 43 | }) 44 | ).toStrictEqual({ 45 | c: { 46 | info: {}, 47 | parent: null, 48 | children: [], 49 | generated: true, 50 | }, 51 | b: { 52 | info: {}, 53 | parent: null, 54 | children: [], 55 | generated: true, 56 | }, 57 | a: { 58 | info: {}, 59 | parent: null, 60 | children: [], 61 | generated: true, 62 | }, 63 | }); 64 | }); 65 | 66 | it("should check type", () => { 67 | expect(strictTypeChecker("linbudu")).toBe(ValidFieldType.String); 68 | expect(strictTypeChecker(null)).toBe(ValidFieldType.Null); 69 | expect(strictTypeChecker(undefined)).toBe(ValidFieldType.Undefined); 70 | expect(strictTypeChecker(599)).toBe(ValidFieldType.Number); 71 | expect(strictTypeChecker(true)).toBe(ValidFieldType.Boolean); 72 | 73 | expect(strictTypeChecker([])).toBe(ValidFieldType.Empty_Array); 74 | expect(strictTypeChecker([1, 2, 3])).toBe(ValidFieldType.Primitive_Array); 75 | expect(strictTypeChecker([1, "2", true])).toBe( 76 | ValidFieldType.Primitive_Array 77 | ); 78 | 79 | expect(strictTypeChecker({ foo: "bar" })).toBe(ValidFieldType.Object); 80 | expect(strictTypeChecker([{ foo: "bar" }, { foo: "bar" }])).toBe( 81 | ValidFieldType.Object_Array 82 | ); 83 | expect(strictTypeChecker(Symbol("linbudu"))).toBe(ValidFieldType.Ignore); 84 | }); 85 | 86 | it("should normalize class fix", () => { 87 | expect(normalizeClassFix(true, "foo")).toBe("foo"); 88 | expect(normalizeClassFix("bar", "")).toBe("bar"); 89 | expect(normalizeClassFix("bar", "foo")).toBe("bar"); 90 | expect(normalizeClassFix(false, "foo")).toBe(""); 91 | expect(normalizeClassFix(false, "")).toBe(""); 92 | }); 93 | 94 | it("should normalize type fix", () => { 95 | expect(normalizeTypeFix("foo", ValidFieldType.Boolean)).toBe(""); 96 | expect(normalizeTypeFix("foo", ValidFieldType.String)).toBe(""); 97 | expect(normalizeTypeFix("foo", ValidFieldType.Number)).toBe(""); 98 | expect(normalizeTypeFix("foo", ValidFieldType.Primitive_Array)).toBe(""); 99 | 100 | expect(normalizeTypeFix("foo", ValidFieldType.Empty_Array)).toBe("Foo"); 101 | expect(normalizeTypeFix("foo", ValidFieldType.Object)).toBe("Foo"); 102 | 
expect(normalizeTypeFix("foo", ValidFieldType.Object_Array)).toBe("Foo"); 103 | }); 104 | }); 105 | -------------------------------------------------------------------------------- /tests/checker.test.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import fs from "fs-extra"; 3 | import tmp from "tmp"; 4 | import * as execa from "execa"; 5 | import { Project, SourceFile } from "ts-morph"; 6 | import { checker } from "../src/checker"; 7 | import { CHECKER_MODULE_SPECIFIER } from "../src/utils"; 8 | 9 | const outputPath = path.resolve(__dirname, "./fixtures/generated-fixture.ts"); 10 | 11 | const emptyOutputPath = path.resolve(__dirname, "./fixtures/empty-fixtures.ts"); 12 | 13 | const tmpFilePath = path.resolve( 14 | path.dirname(outputPath), 15 | "generated_checker.ts" 16 | ); 17 | 18 | let source: SourceFile; 19 | 20 | beforeEach(() => { 21 | fs.ensureFileSync(tmpFilePath); 22 | source = new Project().addSourceFileAtPath(tmpFilePath); 23 | }); 24 | 25 | afterEach(() => { 26 | // fs.rmSync(tmpFilePath); 27 | new Project().removeSourceFile(source); 28 | }); 29 | describe("should check generated code", () => { 30 | it("should skip when disabled", async () => { 31 | const ensureFileSync = jest.spyOn(fs, "ensureFileSync"); 32 | const writeFileSync = jest.spyOn(fs, "writeFileSync"); 33 | 34 | await checker(outputPath, { 35 | disable: true, 36 | keep: true, 37 | execaOptions: {}, 38 | executeOptions: {}, 39 | buildSchemaOptions: {}, 40 | }); 41 | 42 | expect(ensureFileSync).not.toHaveBeenCalled(); 43 | expect(writeFileSync).not.toHaveBeenCalled(); 44 | }); 45 | 46 | it("should generate check file", async () => { 47 | await checker(outputPath, { 48 | disable: false, 49 | keep: true, 50 | execaOptions: {}, 51 | executeOptions: {}, 52 | buildSchemaOptions: {}, 53 | }); 54 | 55 | const checkerImport = source.getImportDeclaration( 56 | (imp) => imp.getModuleSpecifierValue() === CHECKER_MODULE_SPECIFIER 57 | ); 58 | 59 | expect(checkerImport).toBeDefined(); 60 | 61 | expect(checkerImport?.getDefaultImport()?.getText()).toBeUndefined(); 62 | 63 | const resolverClass = source.getClass("TmpResolver"); 64 | 65 | expect(resolverClass).toBeDefined(); 66 | 67 | expect(resolverClass?.getDecorator("Resolver")).toBeDefined(); 68 | expect( 69 | resolverClass 70 | ?.getDecorator("Resolver") 71 | ?.getArguments() 72 | .map((x) => x.getText()) 73 | ).toEqual(["(type)=>Root"]); 74 | 75 | expect(resolverClass?.getMethod("TmpResolver")?.isAsync()).toBeTruthy(); 76 | expect( 77 | resolverClass?.getMethod("TmpResolver")?.getDecorator("Query") 78 | ).toBeDefined(); 79 | expect( 80 | resolverClass 81 | ?.getMethod("TmpResolver") 82 | ?.getDecorator("Query") 83 | ?.getArguments() 84 | .map((x) => x.getText()) 85 | ).toEqual(["(type)=>[Root]"]); 86 | 87 | expect( 88 | resolverClass 89 | ?.getMethod("TmpResolver") 90 | ?.getStatements() 91 | .map((x) => x.getText()) 92 | ).toEqual(["return [];"]); 93 | 94 | expect(fs.readFileSync(tmpFilePath, "utf-8")).toContain("buildSchemaSync"); 95 | }); 96 | 97 | it("should keep generated checker by keep option", async () => { 98 | await checker(outputPath, { 99 | disable: false, 100 | keep: false, 101 | execaOptions: {}, 102 | executeOptions: {}, 103 | buildSchemaOptions: {}, 104 | }); 105 | 106 | expect(fs.existsSync(tmpFilePath)).toBeFalsy(); 107 | 108 | await checker(outputPath, { 109 | disable: false, 110 | keep: true, 111 | execaOptions: {}, 112 | executeOptions: {}, 113 | buildSchemaOptions: {}, 114 | }); 115 | 
116 | expect(fs.existsSync(tmpFilePath)).toBeTruthy(); 117 | }); 118 | 119 | it.skip("should throw when error encountered", async () => { 120 | const outputSource = new Project().addSourceFileAtPath(outputPath); 121 | 122 | outputSource.getClasses().map((cls) => cls.remove()); 123 | 124 | outputSource.saveSync(); 125 | 126 | await expect( 127 | checker(outputPath, { 128 | disable: true, 129 | keep: true, 130 | execaOptions: {}, 131 | executeOptions: {}, 132 | buildSchemaOptions: {}, 133 | }) 134 | ).rejects.toThrow(); 135 | }); 136 | }); 137 | -------------------------------------------------------------------------------- /src/generator.ts: -------------------------------------------------------------------------------- 1 | import { Scope } from "ts-morph"; 2 | import type { 3 | SourceFile, 4 | DecoratorStructure, 5 | PropertyDeclarationStructure, 6 | OptionalKind, 7 | } from "ts-morph"; 8 | 9 | import { 10 | capitalCase, 11 | normalizeClassFix, 12 | normalizeTypeFix, 13 | reverseObjectKeys, 14 | DEFAULT_SUFFIX, 15 | DEFAULT_ENTRY_CLASS_NAME, 16 | BASE_IMPORTS, 17 | BASE_MODULE_SPECIFIER, 18 | } from "./utils"; 19 | import type { 20 | ProcessedFieldInfoObject, 21 | ClassGeneratorRecord, 22 | GeneratorOptions, 23 | RecordValue, 24 | } from "./utils"; 25 | 26 | import { 27 | addImportDeclaration, 28 | ImportType, 29 | invokeClassDeclarationGenerator, 30 | } from "./ast"; 31 | 32 | /** 33 | * Generate AST from parsed info 34 | * @param source source file 35 | * @param parsed parsed info 36 | * @param options generator options 37 | */ 38 | export function generator( 39 | source: SourceFile, 40 | parsed: ProcessedFieldInfoObject, 41 | options: GeneratorOptions 42 | ) { 43 | const classGeneratorRecord: ClassGeneratorRecord = {}; 44 | 45 | addImportDeclaration( 46 | source, 47 | BASE_IMPORTS, 48 | BASE_MODULE_SPECIFIER, 49 | ImportType.NAMED_IMPORTS, 50 | false 51 | ); 52 | 53 | collectClassStruInfo( 54 | source, 55 | parsed, 56 | classGeneratorRecord, 57 | undefined, 58 | options 59 | ); 60 | 61 | reverseRelation(classGeneratorRecord); 62 | 63 | const record = options.sort 64 | ? reverseObjectKeys(classGeneratorRecord) 65 | : classGeneratorRecord; 66 | 67 | invokeClassDeclarationGenerator(source, record, true); 68 | } 69 | 70 | /** 71 | * Collect class info record to generate from parsed info 72 | * @param source source file 73 | * @param parsed parsed info 74 | * @param record class info record 75 | * @param parent info parent (if exist) 76 | * @param options generator options 77 | */ 78 | export function collectClassStruInfo( 79 | source: SourceFile, 80 | parsed: ProcessedFieldInfoObject, 81 | record: ClassGeneratorRecord, 82 | parent: string | undefined, 83 | options: GeneratorOptions 84 | ): void { 85 | const { entryClassName, publicProps, readonlyProps, suffix, prefix } = 86 | options; 87 | 88 | const classDecorator: OptionalKind<DecoratorStructure>[] = [ 89 | { 90 | name: "ObjectType", 91 | arguments: [], 92 | }, 93 | ]; 94 | const properties: OptionalKind<PropertyDeclarationStructure>[] = []; 95 | 96 | const classPrefix = normalizeClassFix(prefix, entryClassName); 97 | const classSuffix = normalizeClassFix(suffix, DEFAULT_SUFFIX); 98 | 99 | for (const [k, v] of Object.entries(parsed)) { 100 | const typePrefix = normalizeTypeFix(classPrefix, v.type); 101 | const typeSuffix = normalizeTypeFix(classSuffix, v.type); 102 | 103 | const propType = `${typePrefix}${v.propType}${typeSuffix}${ 104 | v.list ? "[]" : "" 105 | }`; 106 | 107 | const returnType = `${typePrefix}${v.decoratorReturnType}${typeSuffix}`; 108 | 109 | if (v.nested) { 110 | collectClassStruInfo(source, v.fields!, record, entryClassName, { 111 | ...options, 112 | entryClassName: `${classPrefix}${v.propType}${typeSuffix}`, 113 | }); 114 | } 115 | 116 | const fieldReturnType: string[] = v.decoratorReturnType 117 | ? v.list 118 | ? [ 119 | `(type) => [${returnType}${v.nullableListItem ? "" : "!"}]${ 120 | v.nullable ? "" : "!" 121 | }`, 122 | ] 123 | : [`(type) => ${returnType}${v.nullable ? "" : "!"}`] 124 | : []; 125 | 126 | if (v.nullable) fieldReturnType.push(`{ nullable: true }`); 127 | 128 | properties.push({ 129 | name: v.prop, 130 | type: propType, 131 | decorators: [ 132 | { 133 | name: "Field", 134 | arguments: fieldReturnType, 135 | }, 136 | ], 137 | scope: publicProps.includes(v.prop) ? Scope.Public : undefined, 138 | trailingTrivia: (writer) => writer.newLine(), 139 | hasExclamationToken: !v.nullable, 140 | hasQuestionToken: v.nullable, 141 | isReadonly: readonlyProps.includes(v.prop), 142 | }); 143 | } 144 | 145 | const currentRecord: RecordValue = { 146 | info: { 147 | name: 148 | entryClassName === DEFAULT_ENTRY_CLASS_NAME 149 | ? DEFAULT_ENTRY_CLASS_NAME 150 | : capitalCase(`${entryClassName}`), 151 | decorators: classDecorator, 152 | properties, 153 | isExported: true, 154 | }, 155 | parent: parent ?? null, 156 | children: [], 157 | generated: false, 158 | }; 159 | 160 | record[entryClassName] = currentRecord; 161 | } 162 | /** 163 | * Fill children info to parent item 164 | * @param raw 165 | */ 166 | export function reverseRelation(raw: ClassGeneratorRecord) { 167 | for (const [k, v] of Object.entries(raw)) { 168 | if (v.parent) { 169 | raw[v.parent]["children"].push(k); 170 | } 171 | } 172 | } 173 | -------------------------------------------------------------------------------- /tests/preprocesser.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | preprocessor, 3 | preserveObjectTypeInArrayOnly, 4 | preservePrimitiveTypeInArrayOnly, 5 | } from "../src/preprocessor"; 6 | 7 | const objectOnlyArray = [{ foo: "bar" }]; 8 | const primitiveOnlyArray = [1, 2, 3, 4]; 9 | const mixedPrimitiveOnlyArray = [1, "linbudu", true]; 10 | const mixedArray = [...objectOnlyArray, ...primitiveOnlyArray]; 11 | 12 | describe("should process raw content", () => { 13 | it("should preserve object only", () => { 14 | expect(preserveObjectTypeInArrayOnly(objectOnlyArray)).toStrictEqual( 15 | objectOnlyArray 16 | ); 17 | 18 | expect(preserveObjectTypeInArrayOnly(primitiveOnlyArray)).toStrictEqual([]); 19 | expect( 20 | preserveObjectTypeInArrayOnly(mixedPrimitiveOnlyArray) 21 | ).toStrictEqual([]); 22 | expect(preserveObjectTypeInArrayOnly(mixedArray)).toStrictEqual( 23 | objectOnlyArray 24 | ); 25 | }); 26 | 27 | it("should preserve primitive members only", () => { 28 | expect(preservePrimitiveTypeInArrayOnly(objectOnlyArray)).toStrictEqual([]); 29 | 30 | expect(preservePrimitiveTypeInArrayOnly(primitiveOnlyArray)).toStrictEqual( 31 | primitiveOnlyArray 32 | ); 33 | expect( 34 | preservePrimitiveTypeInArrayOnly(mixedPrimitiveOnlyArray) 35 | ).toStrictEqual(mixedPrimitiveOnlyArray); 36 | expect(preservePrimitiveTypeInArrayOnly(mixedArray)).toStrictEqual( 37 | primitiveOnlyArray 38 | ); 39 | }); 40 | 41 | it("should use custom preprocessor if specified", () => { 42 | const fn = jest.fn().mockImplementation((r) => r); 43 | const raw = { foo: "bar" }; 44 | 45 | preprocessor(raw, { 46 | 
preserveObjectOnlyInArray: true, 47 | customPreprocessor: fn, 48 | }); 49 | expect(fn).toHaveBeenCalledTimes(1); 50 | expect(fn).toHaveBeenCalledWith(raw, { preserveObjectOnlyInArray: true }); 51 | }); 52 | 53 | it("should process array source", () => { 54 | expect( 55 | preprocessor(mixedArray, { preserveObjectOnlyInArray: true }) 56 | ).toStrictEqual(objectOnlyArray); 57 | 58 | expect( 59 | preprocessor(mixedArray, { preserveObjectOnlyInArray: false }) 60 | ).toStrictEqual(primitiveOnlyArray); 61 | }); 62 | 63 | it("should process object", () => { 64 | expect( 65 | preprocessor( 66 | { 67 | foo: { 68 | foo1: [["foo"]], 69 | foo2: "bar", 70 | foo3: [], 71 | }, 72 | }, 73 | { preserveObjectOnlyInArray: true } 74 | ) 75 | ).toStrictEqual({ 76 | foo: { 77 | foo2: "bar", 78 | foo3: [], 79 | }, 80 | }); 81 | 82 | expect( 83 | preprocessor( 84 | { 85 | foo: { 86 | foo1: "foo", 87 | foo2: "bar", 88 | foo3: [], 89 | }, 90 | }, 91 | { preserveObjectOnlyInArray: true } 92 | ) 93 | ).toStrictEqual({ 94 | foo: { 95 | foo1: "foo", 96 | foo2: "bar", 97 | foo3: [], 98 | }, 99 | }); 100 | 101 | expect( 102 | preprocessor( 103 | { 104 | foo: { 105 | foo1: "foo", 106 | foo2: "bar", 107 | foo3: { 108 | foo3arr: [], 109 | foo3nestedarr: [[]], 110 | }, 111 | }, 112 | }, 113 | { preserveObjectOnlyInArray: true } 114 | ) 115 | ).toStrictEqual({ 116 | foo: { 117 | foo1: "foo", 118 | foo2: "bar", 119 | foo3: { 120 | foo3arr: [], 121 | }, 122 | }, 123 | }); 124 | 125 | expect(preprocessor([], { preserveObjectOnlyInArray: true })).toStrictEqual( 126 | [] 127 | ); 128 | 129 | expect( 130 | preprocessor( 131 | { 132 | foo: { 133 | foo1: "foo", 134 | foo2: "bar", 135 | foo3: { 136 | foo3arr: ["foo", "bar"], 137 | foo3nestedarr: [[]], 138 | foo3mixedarr: [{ a: 1 }, "foo", "bar"], 139 | }, 140 | }, 141 | }, 142 | { preserveObjectOnlyInArray: true } 143 | ) 144 | ).toStrictEqual({ 145 | foo: { 146 | foo1: "foo", 147 | foo2: "bar", 148 | foo3: { 149 | foo3arr: ["foo", "bar"], 150 | foo3mixedarr: [{ a: 1 }], 151 | }, 152 | }, 153 | }); 154 | 155 | expect( 156 | preprocessor( 157 | { 158 | foo: { 159 | foo1: "foo", 160 | foo2: "bar", 161 | foo3: { 162 | foo3arr: ["foo", "bar"], 163 | foo3nestedarr: [[]], 164 | foo3mixedarr: [{ a: 1 }, "foo", "bar"], 165 | }, 166 | }, 167 | }, 168 | { preserveObjectOnlyInArray: false } 169 | ) 170 | ).toStrictEqual({ 171 | foo: { 172 | foo1: "foo", 173 | foo2: "bar", 174 | foo3: { 175 | foo3arr: ["foo", "bar"], 176 | foo3mixedarr: ["foo", "bar"], 177 | }, 178 | }, 179 | }); 180 | }); 181 | }); 182 | -------------------------------------------------------------------------------- /jest.config.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * For a detailed explanation regarding each configuration property and type check, visit: 3 | * https://jestjs.io/docs/configuration 4 | */ 5 | 6 | export default { 7 | // All imported modules in your tests should be mocked automatically 8 | // automock: false, 9 | 10 | // Stop running tests after `n` failures 11 | // bail: 0, 12 | 13 | // The directory where Jest should store its cached dependency information 14 | // cacheDirectory: "/private/var/folders/hm/lyff6sxj0gndvyzqmfsj8sk80000gn/T/jest_dx", 15 | 16 | // Automatically clear mock calls and instances between every test 17 | // clearMocks: false, 18 | 19 | // Indicates whether the coverage information should be collected while executing the test 20 | collectCoverage: true, 21 | 22 | // An array of glob patterns indicating a set of files for which 
coverage information should be collected 23 | // collectCoverageFrom: undefined, 24 | 25 | // The directory where Jest should output its coverage files 26 | coverageDirectory: "coverage", 27 | 28 | // An array of regexp pattern strings used to skip coverage collection 29 | // coveragePathIgnorePatterns: [ 30 | // "/node_modules/" 31 | // ], 32 | 33 | // Indicates which provider should be used to instrument code for coverage 34 | // coverageProvider: "babel", 35 | 36 | // A list of reporter names that Jest uses when writing coverage reports 37 | // coverageReporters: [ 38 | // "json", 39 | // "text", 40 | // "lcov", 41 | // "clover" 42 | // ], 43 | 44 | // An object that configures minimum threshold enforcement for coverage results 45 | // coverageThreshold: undefined, 46 | 47 | // A path to a custom dependency extractor 48 | // dependencyExtractor: undefined, 49 | 50 | // Make calling deprecated APIs throw helpful error messages 51 | // errorOnDeprecated: false, 52 | 53 | // Force coverage collection from ignored files using an array of glob patterns 54 | // forceCoverageMatch: [], 55 | 56 | // A path to a module which exports an async function that is triggered once before all test suites 57 | // globalSetup: undefined, 58 | 59 | // A path to a module which exports an async function that is triggered once after all test suites 60 | // globalTeardown: undefined, 61 | 62 | // A set of global variables that need to be available in all test environments 63 | // globals: {}, 64 | 65 | // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers. 66 | // maxWorkers: "50%", 67 | 68 | // An array of directory names to be searched recursively up from the requiring module's location 69 | // moduleDirectories: [ 70 | // "node_modules" 71 | // ], 72 | 73 | // An array of file extensions your modules use 74 | // moduleFileExtensions: [ 75 | // "js", 76 | // "jsx", 77 | // "ts", 78 | // "tsx", 79 | // "json", 80 | // "node" 81 | // ], 82 | 83 | // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module 84 | // moduleNameMapper: {}, 85 | 86 | // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader 87 | // modulePathIgnorePatterns: [], 88 | 89 | // Activates notifications for test results 90 | // notify: false, 91 | 92 | // An enum that specifies notification mode. 
Requires { notify: true } 93 | // notifyMode: "failure-change", 94 | 95 | // A preset that is used as a base for Jest's configuration 96 | preset: "ts-jest", 97 | 98 | // Run tests from one or more projects 99 | // projects: undefined, 100 | 101 | // Use this configuration option to add custom reporters to Jest 102 | // reporters: undefined, 103 | 104 | // Automatically reset mock state between every test 105 | // resetMocks: false, 106 | 107 | // Reset the module registry before running each individual test 108 | // resetModules: false, 109 | 110 | // A path to a custom resolver 111 | // resolver: undefined, 112 | 113 | // Automatically restore mock state between every test 114 | // restoreMocks: false, 115 | 116 | // The root directory that Jest should scan for tests and modules within 117 | // rootDir: undefined, 118 | 119 | // A list of paths to directories that Jest should use to search for files in 120 | // roots: [ 121 | // "" 122 | // ], 123 | 124 | // Allows you to use a custom runner instead of Jest's default test runner 125 | // runner: "jest-runner", 126 | 127 | // The paths to modules that run some code to configure or set up the testing environment before each test 128 | // setupFiles: [], 129 | 130 | // A list of paths to modules that run some code to configure or set up the testing framework before each test 131 | // setupFilesAfterEnv: [], 132 | 133 | // The number of seconds after which a test is considered as slow and reported as such in the results. 134 | // slowTestThreshold: 5, 135 | 136 | // A list of paths to snapshot serializer modules Jest should use for snapshot testing 137 | // snapshotSerializers: [], 138 | 139 | // The test environment that will be used for testing 140 | // testEnvironment: "jest-environment-node", 141 | 142 | // Options that will be passed to the testEnvironment 143 | // testEnvironmentOptions: {}, 144 | 145 | // Adds a location field to test results 146 | // testLocationInResults: false, 147 | 148 | // The glob patterns Jest uses to detect test files 149 | // testMatch: [ 150 | // "**/__tests__/**/*.[jt]s?(x)", 151 | // "**/?(*.)+(spec|test).[tj]s?(x)" 152 | // ], 153 | 154 | // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped 155 | // testPathIgnorePatterns: [ 156 | // "/node_modules/" 157 | // ], 158 | 159 | // The regexp pattern or array of patterns that Jest uses to detect test files 160 | // testRegex: [], 161 | 162 | // This option allows the use of a custom results processor 163 | // testResultsProcessor: undefined, 164 | 165 | // This option allows use of a custom test runner 166 | // testRunner: "jest-circus/runner", 167 | 168 | // This option sets the URL for the jsdom environment. 
It is reflected in properties such as location.href 169 | // testURL: "http://localhost", 170 | 171 | // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout" 172 | // timers: "real", 173 | 174 | // A map from regular expressions to paths to transformers 175 | // transform: undefined, 176 | 177 | // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation 178 | // transformIgnorePatterns: [ 179 | // "/node_modules/", 180 | // "\\.pnp\\.[^\\/]+$" 181 | // ], 182 | 183 | // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them 184 | // unmockedModulePathPatterns: undefined, 185 | 186 | // Indicates whether each individual test should be reported during the run 187 | // verbose: undefined, 188 | 189 | // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode 190 | // watchPathIgnorePatterns: [], 191 | 192 | // Whether to use watchman for file crawling 193 | // watchman: true, 194 | }; 195 | -------------------------------------------------------------------------------- /src/parser.ts: -------------------------------------------------------------------------------- 1 | import intersection from "lodash/intersection"; 2 | import uniqBy from "lodash/uniqBy"; 3 | 4 | import { strictTypeChecker, capitalCase, ValidFieldType } from "./utils"; 5 | import type { 6 | MaybeArray, 7 | SourceObject, 8 | SourceArray, 9 | ProcessedFieldInfoObject, 10 | ParsedFieldInfo, 11 | ParserOptions, 12 | } from "./utils"; 13 | 14 | /** 15 | * Parse raw content to specific structure which can be consumed by generator 16 | * @param content Input content 17 | * @param options Parser options 18 | * @returns 19 | */ 20 | export function parser( 21 | content: MaybeArray | SourceArray, 22 | options: Required 23 | ): ProcessedFieldInfoObject { 24 | return Array.isArray(content) 25 | ? arrayEntryParser(content, options) 26 | : objectEntryParser(content, options); 27 | } 28 | 29 | /** 30 | * Handle array entry json structure parsing 31 | * @param content Input content 32 | * @param options Parser options 33 | * @returns 34 | */ 35 | export function arrayEntryParser( 36 | content: SourceObject[] | SourceArray, 37 | options: ParserOptions 38 | ): ProcessedFieldInfoObject { 39 | const { 40 | forceNonNullable, 41 | forceReturnType, 42 | arrayEntryProp, 43 | forceNonNullableListItem, 44 | } = options; 45 | const parsedFieldInfo: ProcessedFieldInfoObject = {}; 46 | 47 | if (!content.length) return {}; 48 | 49 | const randomItem = content[0]; 50 | const type = strictTypeChecker(randomItem); 51 | 52 | switch (type) { 53 | case ValidFieldType.String: 54 | case ValidFieldType.Boolean: 55 | case ValidFieldType.Number: 56 | parsedFieldInfo["TMP"] = { 57 | type: ValidFieldType.Primitive_Array, 58 | propType: typeof randomItem, 59 | nested: false, 60 | list: true, 61 | prop: arrayEntryProp, 62 | nullable: !forceNonNullable, 63 | nullableListItem: !forceNonNullableListItem, 64 | fields: null, 65 | decoratorReturnType: 66 | typeof randomItem === "number" 67 | ? "Int" 68 | : forceReturnType 69 | ? 
strictTypeChecker(randomItem) 70 | : null, 71 | }; 72 | 73 | break; 74 | 75 | case ValidFieldType.Object: 76 | parsedFieldInfo["TMP"] = { 77 | type: ValidFieldType.Object_Array, 78 | propType: capitalCase(arrayEntryProp), 79 | nested: true, 80 | list: true, 81 | prop: arrayEntryProp, 82 | nullable: !forceNonNullable, 83 | nullableListItem: !forceNonNullableListItem, 84 | fields: objectArrayParser(content as SourceObject[], options), 85 | decoratorReturnType: capitalCase(arrayEntryProp), 86 | }; 87 | 88 | break; 89 | 90 | case ValidFieldType.Empty_Array: 91 | parsedFieldInfo["TMP"] = { 92 | type, 93 | list: true, 94 | propType: capitalCase(arrayEntryProp), 95 | decoratorReturnType: capitalCase(arrayEntryProp), 96 | nested: true, 97 | nullable: !forceNonNullable, 98 | nullableListItem: !forceNonNullableListItem, 99 | prop: arrayEntryProp, 100 | fields: {}, 101 | }; 102 | break; 103 | 104 | case ValidFieldType.Primitive_Array: 105 | case ValidFieldType.Object_Array: 106 | case ValidFieldType.Null: 107 | case ValidFieldType.Undefined: 108 | case ValidFieldType.Ignore: 109 | break; 110 | } 111 | 112 | return parsedFieldInfo; 113 | } 114 | 115 | /** 116 | * Handle common object entry json structure parsing 117 | * @param content Input content 118 | * @param options Parser options 119 | * @returns 120 | */ 121 | export function objectEntryParser( 122 | content: SourceObject, 123 | options: ParserOptions 124 | ): ProcessedFieldInfoObject { 125 | const { forceNonNullable, forceReturnType, forceNonNullableListItem } = 126 | options; 127 | const parsedFieldInfo: ProcessedFieldInfoObject = {}; 128 | 129 | for (const [k, v] of Object.entries(content)) { 130 | const type = strictTypeChecker(v); 131 | const capitalCasedKey = capitalCase(k); 132 | 133 | switch (type) { 134 | case ValidFieldType.String: 135 | case ValidFieldType.Boolean: 136 | parsedFieldInfo[k] = { 137 | type, 138 | propType: typeof v, 139 | nested: false, 140 | prop: k, 141 | nullable: !forceNonNullable, 142 | list: false, 143 | fields: null, 144 | decoratorReturnType: forceReturnType ? type : null, 145 | }; 146 | 147 | break; 148 | 149 | case ValidFieldType.Number: 150 | parsedFieldInfo[k] = { 151 | type, 152 | propType: "number", 153 | nested: false, 154 | prop: k, 155 | nullable: !forceNonNullable, 156 | list: false, 157 | fields: null, 158 | decoratorReturnType: "Int", 159 | }; 160 | 161 | break; 162 | 163 | case ValidFieldType.Object: 164 | parsedFieldInfo[k] = { 165 | type, 166 | propType: capitalCasedKey, 167 | nested: true, 168 | list: false, 169 | prop: k, 170 | nullable: !forceNonNullable, 171 | decoratorReturnType: capitalCasedKey, 172 | fields: parser(v, options), 173 | }; 174 | 175 | break; 176 | 177 | case ValidFieldType.Empty_Array: 178 | parsedFieldInfo[k] = { 179 | type, 180 | list: true, 181 | propType: capitalCasedKey, 182 | decoratorReturnType: capitalCasedKey, 183 | nested: true, 184 | nullable: !forceNonNullable, 185 | nullableListItem: !forceNonNullableListItem, 186 | prop: k, 187 | fields: {}, 188 | }; 189 | break; 190 | 191 | case ValidFieldType.Primitive_Array: 192 | parsedFieldInfo[k] = { 193 | type, 194 | propType: typeof v[0], 195 | nested: false, 196 | list: true, 197 | prop: k, 198 | fields: null, 199 | nullable: !forceNonNullable, 200 | nullableListItem: !forceNonNullableListItem, 201 | decoratorReturnType: 202 | typeof v[0] === "number" ? 
"Int" : strictTypeChecker(v[0]), 203 | }; 204 | break; 205 | 206 | case ValidFieldType.Object_Array: 207 | parsedFieldInfo[k] = { 208 | type, 209 | list: true, 210 | propType: capitalCasedKey, 211 | decoratorReturnType: capitalCasedKey, 212 | nested: true, 213 | nullable: !forceNonNullable, 214 | nullableListItem: !forceNonNullableListItem, 215 | prop: k, 216 | fields: objectArrayParser(v, options), 217 | }; 218 | break; 219 | 220 | case ValidFieldType.Null: 221 | case ValidFieldType.Undefined: 222 | case ValidFieldType.Ignore: 223 | break; 224 | } 225 | } 226 | 227 | return parsedFieldInfo; 228 | } 229 | 230 | /** 231 | * Handle object member array parsing 232 | * @param arr 233 | * @param options 234 | * @returns 235 | */ 236 | export function objectArrayParser( 237 | arr: SourceObject[], 238 | options: ParserOptions 239 | ): ProcessedFieldInfoObject { 240 | const keys: string[][] = []; 241 | const parsedKeys: ParsedFieldInfo[] = []; 242 | 243 | const { forceNonNullable } = options ?? {}; 244 | 245 | const processedResult: ProcessedFieldInfoObject = {}; 246 | 247 | for (const item of arr) { 248 | keys.push(Object.keys(item)); 249 | } 250 | 251 | const intersectionKeys = intersection(...keys); 252 | 253 | intersectionKeys.forEach((key) => { 254 | const nonNullSharedItem = arr.filter( 255 | (item) => item[key] === 0 || item[key] === "" || !![item[key]] 256 | ); 257 | 258 | parsedKeys.push({ 259 | ...parser(nonNullSharedItem[0], options)[key], 260 | shared: true, 261 | // NOTE: shared keys are regarded as non-nullable value 262 | // even in common array entry situation 263 | nullable: false, 264 | }); 265 | }); 266 | 267 | for (const item of arr) { 268 | intersectionKeys.forEach((key) => { 269 | key in item && delete item[key]; 270 | }); 271 | } 272 | 273 | for (const item of arr) { 274 | for (const [k, v] of Object.entries(item)) { 275 | parsedKeys.push({ 276 | ...parser({ [k]: v }, options)[k], 277 | shared: false, 278 | nullable: !forceNonNullable, 279 | }); 280 | } 281 | } 282 | 283 | const result = uniqBy(parsedKeys, (key) => key.prop); 284 | 285 | result.forEach((item) => { 286 | processedResult[item.prop] = item; 287 | }); 288 | 289 | return processedResult; 290 | } 291 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # json-type-graphql 2 | 3 | Generate TypeGraphQL class definition from JSON/Remote API/JSON File. 4 | 5 | ## Start 6 | 7 | ```bash 8 | npm i json-type-graphql --save 9 | yarn add json-type-graphql --save 10 | pnpm i json-type-graphql --save 11 | ``` 12 | 13 | ## Current Feature 14 | 15 | This project is still under heavy development, the documentation is far from ready, but basic features are already supported: 16 | 17 | - Support `nested object type`(like: `{ foo: { bar: { baz:{} } } }`) and `array entry json`(like: `[{},{}]`) type generation. 18 | - Normal generator order as `P-C1-C11-C12-C2-C21-C3-C31`. 19 | - Customizable processing flow: reader / preprocessor / postprocessor / writer 20 | - ... 21 | 22 | ## Example 23 | 24 | > Run `yarn demo` to explore! 
25 | 26 | JSON: 27 | 28 | ```json 29 | { 30 | "booleanField": true, 31 | "numberField": 200, 32 | "stringField": "success", 33 | "primitiveArrayField": [1, 2, 3, 4, 5], 34 | "mixedField": [ 35 | 1, 36 | 2, 37 | { 38 | "a": "1111111" 39 | } 40 | ], 41 | "emptyArrayField": [], 42 | "nestedField": { 43 | "booleanField": true, 44 | "numberField": 200, 45 | "stringField": "success", 46 | "primitiveArrayField": [1, 2, 3, 4, 5], 47 | "mixedFieldrs": [1, 2] 48 | } 49 | } 50 | ``` 51 | 52 | ```typescript 53 | import path from "path"; 54 | import fs from "fs-extra"; 55 | import transformer from "json-type-graphql"; 56 | 57 | (async () => { 58 | await transformer({ 59 | // Provide json file path 60 | reader: { path: path.join(__dirname, "./demo.json") }, 61 | // Customize parser behaviour 62 | parser: { 63 | forceNonNullable: false, 64 | }, 65 | // Customize generator behaviour 66 | generator: { entryClassName: "Root", sort: true }, 67 | // Check whether the generated TypeGraphQL class can be used normally 68 | checker: { 69 | disable: false, 70 | }, 71 | // Write generated file! 72 | writter: { 73 | outputPath: path.join(__dirname, "./generated.ts"), 74 | }, 75 | }); 76 | })(); 77 | ``` 78 | 79 | > More options will be introduced below. 80 | 81 | generated: 82 | 83 | ```typescript 84 | import { ObjectType, Field, Int, ID } from "type-graphql"; 85 | 86 | @ObjectType() 87 | export class MixedField { 88 | @Field() 89 | a!: string; 90 | } 91 | 92 | @ObjectType() 93 | export class EmptyArrayField {} 94 | 95 | @ObjectType() 96 | export class NestedField { 97 | @Field({ nullable: true }) 98 | booleanField?: boolean; 99 | 100 | @Field((type) => Int, { nullable: true }) 101 | numberField?: number; 102 | 103 | @Field({ nullable: true }) 104 | stringField?: string; 105 | 106 | @Field((type) => [Int], { nullable: true }) 107 | primitiveArrayField?: number[]; 108 | 109 | @Field((type) => [Int], { nullable: true }) 110 | mixedFieldrs?: number[]; 111 | } 112 | 113 | @ObjectType() 114 | export class Root { 115 | @Field({ nullable: true }) 116 | booleanField?: boolean; 117 | 118 | @Field((type) => Int, { nullable: true }) 119 | numberField?: number; 120 | 121 | @Field({ nullable: true }) 122 | stringField?: string; 123 | 124 | @Field((type) => [Int], { nullable: true }) 125 | primitiveArrayField?: number[]; 126 | 127 | @Field((type) => [MixedField], { nullable: true }) 128 | mixedField?: MixedField[]; 129 | 130 | @Field((type) => [EmptyArrayField], { nullable: true }) 131 | emptyArrayField?: EmptyArrayField[]; 132 | 133 | @Field((type) => NestedField, { nullable: true }) 134 | nestedField?: NestedField; 135 | } 136 | ``` 137 | 138 | ## Programmatic Usage 139 | 140 | ```typescript 141 | import { 142 | reader, 143 | parser, 144 | preprocessor, 145 | generator, 146 | writter, 147 | } from "json-type-graphql"; 148 | 149 | export default async function handler(options: Options): Promise<void> { 150 | // read from the data source you want 151 | // you can also use a custom reader 152 | const content = await reader(options.reader); 153 | 154 | // make some custom processing 155 | const preprocessed = preprocessor(content, normalizedPreprocessorOptions); 156 | 157 | // parse content 158 | const parsedInfo = parser(preprocessed, normalizedParserOptions); 159 | 160 | fs.ensureFileSync(normalizedWritterOptions.outputPath); 161 | 162 | const source = new Project().addSourceFileAtPath( 163 | normalizedWritterOptions.outputPath 164 | ); 165 | 166 | // generate AST and result!
167 | generator(source, parsedInfo, normalizedGeneratorOptions); 168 | 169 | // write! 170 | writter(normalizedWritterOptions); 171 | } 172 | ``` 173 | 174 | ## Options 175 | 176 | ### Reader 177 | 178 | **Reader** is responsible for reading data from different sources including `JSON File` / `URL Request` / `Raw JavaScript Object`; you must provide one of the `reader.path` / `reader.url` / `reader.raw` options. 179 | 180 | #### Reader.Options 181 | 182 | - `path`(`string`): **Absolute** JSON file path. 183 | - `url`(`string`) & `options`(`Got Options`): Using [got](https://www.npmjs.com/package/got) for data fetching: `got(url, options)`. 184 | - `raw`(`object` | `array`): Vanilla JavaScript Object / Array. 185 | 186 | After content acquisition is completed, the content is passed to the next handler, the **preprocessor**. 187 | 188 | ### Preprocessor 189 | 190 | Preprocessor performs some extra pre-processing work on the incoming content: 191 | 192 | - **Recursively delete** object fields whose value is a **nested array** like `[[]]`; this is not supported yet and may cause unexpected behaviours or errors. 193 | - Ensure arrays contain either **primitive type values** or **object type values**. By default, **only object values will be preserved** when the array 194 | contains both kinds of members (you can control this behaviour with `preprocessor.preserveObjectOnlyInArray`). 195 | 196 | #### Preprocessor.Options 197 | 198 | - `preserveObjectOnlyInArray`(`boolean`): `default: true` 199 | - `customPreprocessor`(`(raw: object | array) => object | array`): Use your own custom preprocessor, which accepts the content from the reader and should return a JavaScript Object / Array. 200 | 201 | ### Parser 202 | 203 | **Parser** transforms the pre-processed content into a specific object structure, 204 | which will be consumed by the `generator`. 205 | 206 | > Array entry structures (like `[]`) and object entry structures (like `{}`) will be parsed differently. 207 | 208 | #### Parser.Options 209 | 210 | - `forceNonNullable`(`boolean`): Mark all fields as non-nullable. `default: true` 211 | - `forceNonNullableListItem`(`boolean`): Mark all list items as non-nullable. `default: false` 212 | - `forceReturnType`(`boolean`): Generate return types even for `string` / `boolean` fields, like `@Field((type) => String)`. `default: false` 213 | - `arrayEntryProp`(`string`): When parsing an array-entry structure, use the specified prop name, like `data: Data[]`. `default: 'data'`. 214 | For example, `[{ foo: 1 }]` will be parsed to: 215 | 216 | ```javascript 217 | class Data { 218 | foo: number; 219 | } 220 | 221 | class Root { 222 | data: Data[]; 223 | } 224 | ``` 225 | 226 | ### Generator 227 | 228 | **Generator** traverses the parsed info and performs the corresponding AST operations to generate class definitions with TypeGraphQL decorators. 229 | 230 | #### Generator.Options 231 | 232 | - `entryClassName`(`string`): The top-level generated entry class name. `default: 'Root'`. 233 | - `prefix`(`boolean` | `string`): Prefix for generated class names; you can set `prefix: true` to simply avoid repeated class specifiers. `default: false`. 234 | When enabled, the parent class name is used as the child class name's prefix, e.g. `RootChildSomeChildProp` comes from: 235 | 236 | ```javascript 237 | class Root { 238 | child: RootChild; 239 | } 240 | 241 | class RootChild { 242 | someChildProp: RootChildSomeChildProp; 243 | } 244 | 245 | class RootChildSomeChildProp {} 246 | 247 | ``` 248 | - `suffix`(`boolean` | `string`): Suffix for generated class names, e.g.
`RootType`, where `Type` is the specified suffix. `default: false`. 249 | - `publicProps`(`string[]`): Props whose names are included here will have the `public` keyword attached. 250 | - `readonlyProps`(`string[]`): Props whose names are included here will have the `readonly` keyword attached. 251 | - `sort`(`boolean`): Whether to sort generated classes in normal order like `P-C1-C11-C12-C2-C21-C3-C31`. `default: true`. 252 | 253 | ### Postprocessor 254 | 255 | **Postprocessor** is used to apply some post-processing work to the generated source (a TypeScript `SourceFile`). You can use [ts-morph](https://ts-morph.com/) for simple and flexible AST operations, which is also what powers the generator part. 256 | 257 | #### Postprocessor.Options 258 | 259 | - `customPostprocessor`(`(source: SourceFile) => SourceFile`): Custom post-processor which accepts the AST source file. 260 | 261 | ### Checker 262 | 263 | **Checker** uses the generated class definitions to create a tmp resolver and invokes TypeGraphQL's `buildSchemaSync` method to check whether the generated file works correctly. 264 | 265 | We're using `ts-node tmp-file.ts --compiler-options [options]` to perform the check under the hood. 266 | 267 | #### Checker.Options 268 | 269 | - `disable`(`boolean`): Disable the checker. `default: true` 270 | - `keep`(`boolean`): Keep the generated tmp checker file. `default: false` 271 | - `execaOptions`(`Execa Options`): Extra options passed to [execa](https://www.npmjs.com/package/execa). 272 | - `executeOptions`(`Ts-node compile Options`): Extra options passed to ts-node `--compiler-options`, which are the same as the TypeScript `CompilerOptions`. 273 | 274 | ### Writer 275 | 276 | **Writer** formats and writes the generated source file. 277 | 278 | #### Writer.Options 279 | 280 | - `outputPath`(`string`): Output path. Required. 281 | - `format`(`boolean`): Whether to format the output with `Prettier`. `default: true`. 282 | - `formatOptions`(`Prettier Options`): Options passed to `Prettier.format`.
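As a worked example of the custom hooks described above, here is a minimal sketch wiring `customPreprocessor` and `customPostprocessor` into a single run. The `__meta` field being stripped and the banner comment being inserted are purely illustrative; the option shapes follow the ones documented above.

```typescript
import path from "path";
import type { SourceFile } from "ts-morph";
import transformer from "json-type-graphql";

(async () => {
  await transformer({
    // Feed a raw object instead of a file path or URL
    reader: { raw: { id: 1, __meta: "internal", tags: [1, 2, 3] } },
    preprocessor: {
      // Illustrative: drop a field before it reaches the parser
      customPreprocessor: (raw) => {
        if (!Array.isArray(raw)) delete (raw as Record<string, unknown>).__meta;
        return raw;
      },
    },
    postprocessor: {
      // Illustrative: prepend a banner comment to the generated source via ts-morph
      customPostprocessor: (source: SourceFile) => {
        source.insertStatements(0, "// Auto-generated. Do not edit.");
        return source;
      },
    },
    writter: {
      outputPath: path.join(__dirname, "./generated.ts"),
    },
  });
})();
```

Both hooks are optional and can be omitted independently.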
283 | -------------------------------------------------------------------------------- /src/utils.ts: -------------------------------------------------------------------------------- 1 | import { Options as PrettierOptions } from "prettier"; 2 | import { capitalCase as originalCapitalCase } from "capital-case"; 3 | import { OptionalKind, ClassDeclarationStructure, SourceFile } from "ts-morph"; 4 | import { Options as GotOptions } from "got"; 5 | import { Options as ExecaOptions } from "execa"; 6 | import { CompilerOptions } from "typescript"; 7 | import { BuildSchemaOptions } from "type-graphql"; 8 | 9 | /** 10 | * Capitalize string, avoid incorrect behaviour like: "nestedType" -> "Nested Type" 11 | * @param str 12 | * @returns 13 | */ 14 | export const capitalCase: typeof originalCapitalCase = (str) => 15 | originalCapitalCase(str, { delimiter: "" }); 16 | 17 | export type ClassGeneratorRecord = Record< 18 | string, 19 | { 20 | info: OptionalKind; 21 | parent: string | null; 22 | children: string[]; 23 | generated: boolean; 24 | } 25 | >; 26 | 27 | export type ClassInfo = OptionalKind; 28 | 29 | export const ARRAY_ENTRY_STRUCTURE_PROP = "data"; 30 | 31 | export const DEFAULT_ENTRY_CLASS_NAME = "Root"; 32 | 33 | export const DEFAULT_ENTRY_CLASS_NAME_SUFFIX = "__TMP_CLASS_NAME_TYPE__"; 34 | 35 | export const DEFAULT_SUFFIX = "Type"; 36 | 37 | export const BASE_IMPORTS = ["ObjectType", "Field", "Int", "ID"]; 38 | export const BASE_MODULE_SPECIFIER = "type-graphql"; 39 | 40 | export const CHECKER_IMPORTS = ["Resolver", "Query", "buildSchemaSync"]; 41 | export const CHECKER_MODULE_SPECIFIER = "reflect-metadata"; 42 | 43 | export type SourceObject = Record; 44 | 45 | export type SourceArray = Array; 46 | 47 | export type ValidPrimitiveType = "string" | "number" | "boolean"; 48 | 49 | export type RecordValue = T extends Record ? 
R : never; 50 | 51 | export type MaybeArray = T | Array; 52 | 53 | export type ParsedFieldInfo = { 54 | type: ValidFieldType; 55 | nested: boolean; 56 | prop: string; 57 | propType: "string" | "number" | "boolean" | string; 58 | list: boolean; 59 | nullable: boolean; 60 | decoratorReturnType: string | null; 61 | fields: ProcessedFieldInfoObject | null; 62 | shared?: boolean; 63 | nullableListItem?: boolean; 64 | }; 65 | 66 | export type ReaderOptions = { 67 | path?: string; 68 | url?: string; 69 | options?: GotOptions; 70 | raw?: SourceObject | SourceObject[] | SourceArray; 71 | }; 72 | 73 | /** 74 | * Custom preprocess function 75 | */ 76 | export type PreprocessorFunc = ( 77 | raw: SourceObject | SourceObject[] | SourceArray, 78 | options: Omit 79 | ) => MaybeArray | SourceArray; 80 | 81 | export type PreprocessorOptions = { 82 | preserveObjectOnlyInArray: boolean; 83 | customPreprocessor?: PreprocessorFunc; 84 | }; 85 | 86 | export type ParserOptions = { 87 | forceNonNullable: boolean; 88 | forceNonNullableListItem: boolean; 89 | forceReturnType: boolean; 90 | arrayEntryProp: string; 91 | }; 92 | 93 | export type GeneratorOptions = { 94 | entryClassName: string; 95 | prefix: boolean | string; 96 | suffix: boolean | string; 97 | publicProps: string[]; 98 | readonlyProps: string[]; 99 | sort: boolean; 100 | }; 101 | 102 | export type PostprocessorFunc = ( 103 | source: SourceFile, 104 | options: Omit 105 | ) => void; 106 | 107 | export type PostprocessorOptions = { 108 | // removeUnusedDecorators: boolean; 109 | customPostprocessor?: PostprocessorFunc; 110 | }; 111 | 112 | export type CheckerOptions = { 113 | disable: boolean; 114 | keep: boolean; 115 | execaOptions: ExecaOptions; 116 | executeOptions: CompilerOptions; 117 | buildSchemaOptions: Omit; 118 | }; 119 | 120 | export type WriterOptions = { 121 | outputPath: string; 122 | override?: boolean; 123 | format?: boolean; 124 | formatOptions?: PrettierOptions; 125 | }; 126 | 127 | export type Options = { 128 | /** 129 | * Options pass to reader 130 | */ 131 | reader: Partial; 132 | /** 133 | * Options pass to pre-processor 134 | */ 135 | preprocessor?: Partial; 136 | /** 137 | * Options pass to parser 138 | */ 139 | parser?: Partial; 140 | /** 141 | * Options pass to generator 142 | */ 143 | generator?: Partial; 144 | /** 145 | * Options pass to post-processor 146 | */ 147 | postprocessor?: Partial; 148 | /** 149 | * Options pass to checker 150 | */ 151 | checker?: Partial; 152 | /** 153 | * Options pass to writer 154 | */ 155 | writter: WriterOptions; 156 | }; 157 | 158 | export type InferPartialTypeParam = T extends Partial ? R : T; 159 | 160 | export type NormalizedOptions = { 161 | [K in keyof Omit< 162 | Options, 163 | "reader" 164 | > as `normalized${Capitalize}Options`]-?: InferPartialTypeParam< 165 | Options[K] 166 | >; 167 | }; 168 | 169 | export function normalizeOptions(options: Options): NormalizedOptions { 170 | const { preserveObjectOnlyInArray = true, customPreprocessor = undefined } = 171 | options?.preprocessor ?? {}; 172 | 173 | const { 174 | forceNonNullable = true, 175 | forceReturnType = false, 176 | arrayEntryProp = ARRAY_ENTRY_STRUCTURE_PROP, 177 | forceNonNullableListItem = false, 178 | } = options.parser ?? {}; 179 | 180 | const { 181 | prefix = false, 182 | publicProps = [], 183 | readonlyProps = [], 184 | suffix = false, 185 | entryClassName = DEFAULT_ENTRY_CLASS_NAME, 186 | sort = true, 187 | } = options.generator ?? {}; 188 | 189 | const { customPostprocessor = undefined } = options.postprocessor ?? 
{}; 190 | 191 | const { 192 | disable: disableChecker = true, 193 | keep = false, 194 | execaOptions = {}, 195 | executeOptions = {}, 196 | buildSchemaOptions = {}, 197 | } = options.checker ?? {}; 198 | 199 | const { 200 | format = true, 201 | override = false, 202 | formatOptions = {}, 203 | outputPath, 204 | } = options.writter ?? {}; 205 | 206 | return { 207 | normalizedPreprocessorOptions: { 208 | preserveObjectOnlyInArray, 209 | customPreprocessor, 210 | }, 211 | normalizedParserOptions: { 212 | forceNonNullable, 213 | forceReturnType, 214 | arrayEntryProp, 215 | forceNonNullableListItem, 216 | }, 217 | normalizedGeneratorOptions: { 218 | prefix, 219 | publicProps, 220 | readonlyProps, 221 | suffix, 222 | entryClassName, 223 | sort, 224 | }, 225 | normalizedPostprocessorOptions: { 226 | customPostprocessor, 227 | }, 228 | normalizedCheckerOptions: { 229 | disable: sort || disableChecker, 230 | keep, 231 | execaOptions, 232 | executeOptions, 233 | buildSchemaOptions, 234 | }, 235 | normalizedWritterOptions: { 236 | format, 237 | override, 238 | formatOptions, 239 | outputPath, 240 | }, 241 | }; 242 | } 243 | 244 | export type ProcessedFieldInfoObject = Record; 245 | 246 | /** 247 | * Valid field type 248 | * which can be used by parser and generator to perform different operations on. 249 | */ 250 | export const enum ValidFieldType { 251 | Null = "Null", 252 | Undefined = "Undefined", 253 | String = "String", 254 | Number = "Number", 255 | Boolean = "Boolean", 256 | Object = "Object", 257 | Primitive_Array = "Primitive_Array", 258 | Object_Array = "Object_Array", 259 | Empty_Array = "Empty_Array", 260 | Ignore = "Ignore", 261 | } 262 | 263 | /** 264 | * Ensure args to be array 265 | * @param maybeArray 266 | * @returns 267 | */ 268 | export function ensureArray(maybeArray: T | T[]): T[] { 269 | return Array.isArray(maybeArray) ? maybeArray : [maybeArray]; 270 | } 271 | 272 | // FIXME: Use Map to ensure k-v are placed as insert order 273 | /** 274 | * Simply reverse object by reversing its keys. 275 | * @param object 276 | * @returns 277 | */ 278 | export function reverseObjectKeys( 279 | object: ClassGeneratorRecord 280 | ): ClassGeneratorRecord { 281 | const result: ClassGeneratorRecord = {}; 282 | for (const key of Object.keys(object).reverse()) { 283 | result[key] = object[key]; 284 | } 285 | 286 | return result; 287 | } 288 | 289 | /** 290 | * Classify field type to stricter kinds 291 | * @param val 292 | * @returns `ValidFieldType` 293 | */ 294 | export function strictTypeChecker(val: unknown): ValidFieldType { 295 | if (val === null) return ValidFieldType.Null; 296 | if (typeof val === "undefined") return ValidFieldType.Undefined; 297 | if (typeof val === "string") return ValidFieldType.String; 298 | if (typeof val === "number") return ValidFieldType.Number; 299 | if (typeof val === "boolean") return ValidFieldType.Boolean; 300 | 301 | if (Array.isArray(val)) { 302 | if (!val.length) return ValidFieldType.Empty_Array; 303 | return ["string", "number", "boolean"].includes(typeof val[0]) 304 | ? ValidFieldType.Primitive_Array 305 | : ValidFieldType.Object_Array; 306 | } 307 | 308 | if (typeof val === "object") return ValidFieldType.Object; 309 | 310 | return ValidFieldType.Ignore; 311 | } 312 | 313 | export function normalizeClassFix( 314 | fix: string | boolean, 315 | fallback: string 316 | ): string { 317 | return fix ? (typeof fix === "string" ? 
fix : fallback) : ""; 318 | } 319 | 320 | /** 321 | * Nornalize type fix, skip on specific field type 322 | * @param fix 323 | * @param type 324 | * @returns 325 | */ 326 | export function normalizeTypeFix(fix: string, type: ValidFieldType): string { 327 | return [ 328 | ValidFieldType.Boolean, 329 | ValidFieldType.String, 330 | ValidFieldType.Number, 331 | ValidFieldType.Primitive_Array, 332 | ].includes(type) 333 | ? "" 334 | : capitalCase(fix); 335 | } 336 | -------------------------------------------------------------------------------- /tests/ast.test.ts: -------------------------------------------------------------------------------- 1 | import prettier from "prettier"; 2 | import tmp from "tmp"; 3 | import fs from "fs-extra"; 4 | import path from "path"; 5 | import { Project, SourceFile } from "ts-morph"; 6 | import { 7 | checkExistClassDeclarations, 8 | removeNamedImportsMember, 9 | setNamedImportsMember, 10 | removeImportDeclarations, 11 | appendNamedImportsMember, 12 | addImportDeclaration, 13 | ImportType, 14 | removeClassDeclarations, 15 | classDeclarationGeneratorFromList, 16 | createTmpResolverContent, 17 | classDeclarationGenerator, 18 | invokeClassDeclarationGenerator, 19 | } from "../src/ast"; 20 | import { 21 | BASE_MODULE_SPECIFIER, 22 | CHECKER_IMPORTS, 23 | CHECKER_MODULE_SPECIFIER, 24 | } from "../src/utils"; 25 | 26 | let tmpFile: string; 27 | let source: SourceFile; 28 | 29 | beforeEach(() => { 30 | tmpFile = tmp.fileSync().name; 31 | 32 | fs.writeFileSync( 33 | tmpFile, 34 | fs.readFileSync( 35 | path.resolve(__dirname, "./fixtures/ast-fixture.ts"), 36 | "utf-8" 37 | ) 38 | ); 39 | source = new Project().addSourceFileAtPath(tmpFile); 40 | }); 41 | 42 | afterEach(() => { 43 | fs.rmSync(tmpFile); 44 | new Project().removeSourceFile(source); 45 | }); 46 | 47 | describe("should perform AST operations", () => { 48 | it("should generate simple class declaration", () => { 49 | invokeClassDeclarationGenerator( 50 | source, 51 | 52 | { 53 | FooBar: { 54 | info: { 55 | name: "FooBar", 56 | }, 57 | parent: null, 58 | children: [], 59 | generated: false, 60 | }, 61 | Wuhu: { 62 | info: { 63 | name: "Wuhu", 64 | }, 65 | parent: null, 66 | children: [], 67 | generated: false, 68 | }, 69 | }, 70 | true 71 | ); 72 | 73 | expect(source.getClasses().map((x) => x.getName())).toContain("Wuhu"); 74 | expect(source.getClasses().map((x) => x.getName())).toContain("FooBar"); 75 | }); 76 | 77 | it("should skip generated class info", () => { 78 | invokeClassDeclarationGenerator( 79 | source, 80 | 81 | { 82 | FooBar: { 83 | info: { 84 | name: "FooBar", 85 | }, 86 | parent: null, 87 | children: [], 88 | generated: true, 89 | }, 90 | Wuhu: { 91 | info: { 92 | name: "Wuhu", 93 | }, 94 | parent: null, 95 | children: [], 96 | generated: true, 97 | }, 98 | }, 99 | true 100 | ); 101 | 102 | expect(source.getClasses().map((x) => x.getName())).not.toContain("Wuhu"); 103 | expect(source.getClasses().map((x) => x.getName())).not.toContain("FooBar"); 104 | }); 105 | 106 | it("should generate with child info", () => { 107 | invokeClassDeclarationGenerator( 108 | source, 109 | 110 | { 111 | FooBar: { 112 | info: { 113 | name: "FooBar", 114 | }, 115 | parent: null, 116 | children: ["Wuhu"], 117 | generated: false, 118 | }, 119 | Wuhu: { 120 | info: { 121 | name: "Wuhu", 122 | }, 123 | parent: "FooBar", 124 | children: ["WuhuChild"], 125 | generated: false, 126 | }, 127 | WuhuChild: { 128 | info: { 129 | name: "WuhuChild", 130 | }, 131 | parent: "Wuhu", 132 | children: [], 133 | generated: false, 134 | }, 
135 | }, 136 | true 137 | ); 138 | 139 | expect(source.getClasses().map((x) => x.getName())).toContain("FooBar"); 140 | expect(source.getClasses().map((x) => x.getName())).toContain("Wuhu"); 141 | expect(source.getClasses().map((x) => x.getName())).toContain("WuhuChild"); 142 | }); 143 | 144 | it("should check exist class", () => { 145 | expect(checkExistClassDeclarations(source)).toEqual(["Foo", "Bar"]); 146 | }); 147 | 148 | it("should remove named imports member", () => { 149 | removeNamedImportsMember(source, ["Scope"], "ts-morph"); 150 | 151 | expect( 152 | source 153 | .getImportDeclaration( 154 | (imp) => imp.getModuleSpecifierValue() === "ts-morph" 155 | )! 156 | .getNamedImports() 157 | .map((x) => x.getName()) 158 | ).not.toContain("Scope"); 159 | 160 | expect( 161 | source 162 | .getImportDeclaration( 163 | (imp) => imp.getModuleSpecifierValue() === "ts-morph" 164 | )! 165 | .getNamedImports() 166 | .map((x) => x.getName()) 167 | ).toContain("SyntaxKind"); 168 | }); 169 | 170 | it("should set named imports member", () => { 171 | setNamedImportsMember(source, ["Decorator"], "ts-morph", true); 172 | expect( 173 | source 174 | .getImportDeclaration( 175 | (imp) => imp.getModuleSpecifierValue() === "ts-morph" 176 | )! 177 | .getNamedImports() 178 | .map((x) => x.getName()) 179 | ).toEqual(["Decorator"]); 180 | }); 181 | 182 | it("should remove import declaration", () => { 183 | removeImportDeclarations(source, "ts-morph", true); 184 | expect( 185 | source.getImportDeclarations().map((imp) => imp.getModuleSpecifierValue()) 186 | ).not.toContain("ts-morph"); 187 | }); 188 | it("should add named imports", () => { 189 | // exist 190 | appendNamedImportsMember(source, ["SyntaxKind"], "ts-morph", true); 191 | // new 192 | appendNamedImportsMember(source, ["Decorator"], "ts-morph", true); 193 | expect( 194 | source 195 | .getImportDeclaration( 196 | (imp) => imp.getModuleSpecifierValue() === "ts-morph" 197 | )! 198 | .getNamedImports() 199 | .map((x) => x.getName()) 200 | ).toEqual(["Scope", "SyntaxKind", "Decorator"]); 201 | }); 202 | 203 | it("should add import declaration", () => { 204 | addImportDeclaration( 205 | source, 206 | "prettier", 207 | "prettier", 208 | ImportType.NAMESPACE_IMPORT, 209 | true 210 | ); 211 | 212 | addImportDeclaration( 213 | source, 214 | undefined, 215 | "reflect-metadata", 216 | ImportType.DEFAULT_IMPORT, 217 | true 218 | ); 219 | 220 | addImportDeclaration( 221 | source, 222 | ["green"], 223 | "chalk", 224 | ImportType.NAMED_IMPORTS, 225 | true 226 | ); 227 | 228 | const allImps = source 229 | .getImportDeclarations() 230 | .map((imp) => imp.getModuleSpecifierValue()); 231 | 232 | expect(allImps).toContain("prettier"); 233 | expect(allImps).toContain("reflect-metadata"); 234 | expect(allImps).toContain("chalk"); 235 | 236 | expect( 237 | source 238 | .getImportDeclaration( 239 | (imp) => imp.getModuleSpecifierValue() === "prettier" 240 | )! 241 | .getNamespaceImport() 242 | ?.getText() 243 | ).toEqual("prettier"); 244 | 245 | expect( 246 | source 247 | .getImportDeclaration( 248 | (imp) => imp.getModuleSpecifierValue() === "reflect-metadata" 249 | )! 250 | .getDefaultImport() 251 | ?.getText() 252 | ).toBeUndefined; 253 | 254 | expect( 255 | source 256 | .getImportDeclaration( 257 | (imp) => imp.getModuleSpecifierValue() === "chalk" 258 | )! 
259 | .getNamedImports() 260 | .map((x) => x.getText()) 261 | ).toEqual(["green"]); 262 | }); 263 | 264 | it("should remove class declaration", () => { 265 | removeClassDeclarations(source, ["Foo"], true); 266 | expect(source.getClasses().map((cls) => cls.getName()!)).not.toContain( 267 | "Foo" 268 | ); 269 | expect(source.getClasses().map((cls) => cls.getName()!)).toContain("Bar"); 270 | }); 271 | 272 | it("should create class declarations", () => { 273 | classDeclarationGeneratorFromList( 274 | source, 275 | [{ name: "FooBar" }, { name: "Wuhu" }], 276 | true 277 | ); 278 | expect(source.getClasses().map((cls) => cls.getName()!)).toContain( 279 | "FooBar" 280 | ); 281 | expect(source.getClasses().map((cls) => cls.getName()!)).toContain("Wuhu"); 282 | }); 283 | 284 | it("should create tmp resolver content", () => { 285 | createTmpResolverContent( 286 | source, 287 | { 288 | buildSchemaOptions: { 289 | emitSchemaFile: true, 290 | nullableByDefault: true, 291 | dateScalarMode: "timestamp", 292 | }, 293 | disable: false, 294 | keep: true, 295 | execaOptions: {}, 296 | executeOptions: {}, 297 | }, 298 | "Root" 299 | ); 300 | 301 | expect( 302 | source.getImportDeclaration( 303 | (x) => x.getModuleSpecifierValue() === CHECKER_MODULE_SPECIFIER 304 | ) 305 | ).toBeDefined(); 306 | 307 | expect( 308 | source 309 | .getImportDeclaration( 310 | (x) => x.getModuleSpecifierValue() === CHECKER_MODULE_SPECIFIER 311 | ) 312 | ?.getDefaultImport() 313 | ?.getText() 314 | ).toBeUndefined(); 315 | 316 | // expect( 317 | // source 318 | // .getImportDeclaration( 319 | // (x) => x.getModuleSpecifierValue() === BASE_MODULE_SPECIFIER 320 | // ) 321 | // ?.getNamedImports() 322 | // .map((x) => x.getText()) 323 | // ).toEqual(CHECKER_IMPORTS); 324 | 325 | const tmpClass = source.getClass((cls) => cls.getName() === "TmpResolver"); 326 | 327 | expect(tmpClass).toBeDefined(); 328 | 329 | expect(tmpClass?.getDecorator("Resolver")).toBeDefined(); 330 | 331 | expect( 332 | tmpClass 333 | ?.getDecorator("Resolver") 334 | ?.getArguments() 335 | .map((x) => x.getText())[0] 336 | ).toBe(`(type)=>Root`); 337 | 338 | expect(tmpClass?.getMethod("TmpResolver")).toBeDefined(); 339 | expect(tmpClass?.getMethod("TmpResolver")?.isAsync()).toBeTruthy(); 340 | expect( 341 | tmpClass?.getMethod("TmpResolver")?.getDecorator("Query") 342 | ).toBeDefined(); 343 | expect( 344 | tmpClass 345 | ?.getMethod("TmpResolver") 346 | ?.getDecorator("Query") 347 | ?.getArguments() 348 | .map((x) => x.getText())[0] 349 | ).toBe(`(type)=>[Root]`); 350 | 351 | expect(tmpClass?.getMethods().map((x) => x.getName())[0]).toBe( 352 | "TmpResolver" 353 | ); 354 | }); 355 | }); 356 | -------------------------------------------------------------------------------- /src/ast.ts: -------------------------------------------------------------------------------- 1 | import type { ClassDeclaration, SourceFile, Statement, ts } from "ts-morph"; 2 | 3 | import { 4 | ensureArray, 5 | BASE_MODULE_SPECIFIER, 6 | CHECKER_IMPORTS, 7 | CHECKER_MODULE_SPECIFIER, 8 | CheckerOptions, 9 | } from "./utils"; 10 | import type { ClassGeneratorRecord, ClassInfo } from "./utils"; 11 | 12 | /** Global record for class declaration generation */ 13 | let collectedInfoRecord: ClassGeneratorRecord = {}; 14 | 15 | /** 16 | * Start class declaration generation from reversed record 17 | * @param source Source 18 | * @param record Generation record 19 | * @param apply Should apply change directly to the source file 20 | * @description Current generation order: `P - C1 - C1-1 - C2 - C2-1 - 
C3`, 21 | * will support `P - C1 - C2 - C3 - C1-1 - C1-2 in the future` 22 | */ 23 | export function invokeClassDeclarationGenerator( 24 | source: SourceFile, 25 | record: ClassGeneratorRecord, 26 | apply?: boolean 27 | ) { 28 | collectedInfoRecord = record; 29 | 30 | classDeclarationGenerator(source, record, apply); 31 | } 32 | 33 | /** 34 | * Traverse the global record to generate class declarations 35 | * Will execute recursively if record item containes non-empty children prop 36 | * @param source Source 37 | * @param record Generation record 38 | * @param apply Should apply change directly to the source file 39 | */ 40 | export function classDeclarationGenerator( 41 | source: SourceFile, 42 | record: ClassGeneratorRecord, 43 | apply?: boolean 44 | ): void { 45 | for (const [k, v] of Object.entries(record)) { 46 | !v?.generated && source.addClass(v.info); 47 | v.generated = true; 48 | if (v.children.length) { 49 | for (const child of v.children) { 50 | classDeclarationGenerator( 51 | source, 52 | { 53 | [child]: collectedInfoRecord[child], 54 | }, 55 | apply 56 | ); 57 | } 58 | } 59 | } 60 | 61 | apply && source.saveSync(); 62 | } 63 | 64 | /** 65 | * Check exist class declaration in current source file 66 | * @param source Source 67 | * @return List of exist class declaration specifier. 68 | */ 69 | export function checkExistClassDeclarations(source: SourceFile): string[] { 70 | return source.getClasses().map((x) => x.getName()!); 71 | } 72 | 73 | /** 74 | * Remove named imports from specific import 75 | * @param source 76 | * @param namedImportsToRemove 77 | * @param moduleSpecifier 78 | * @param apply 79 | */ 80 | export function removeNamedImportsMember( 81 | source: SourceFile, 82 | namedImportsToRemove: string[], 83 | moduleSpecifier: string, 84 | apply?: boolean 85 | ) { 86 | const target = source.getImportDeclaration( 87 | (dec) => dec.getModuleSpecifierValue() === moduleSpecifier 88 | ); 89 | 90 | const existNamedImports = target?.getNamedImports(); 91 | 92 | const remainedNamedImports = 93 | existNamedImports 94 | ?.filter((imp) => !namedImportsToRemove.includes(imp.getText())) 95 | .map((i) => i.getText()) ?? []; 96 | 97 | setNamedImportsMember(source, remainedNamedImports, moduleSpecifier, false); 98 | 99 | apply && source.saveSync(); 100 | } 101 | 102 | /** 103 | * Directly set named imports member, exist named imports will be removed. 
104 | * @param source Source 105 | * @param namedImports New named imports to set 106 | * @param moduleSpecifier The import declaration to operate 107 | * @param apply Should apply change directly to the source file 108 | */ 109 | export function setNamedImportsMember( 110 | source: SourceFile, 111 | namedImports: string[], 112 | moduleSpecifier: string, 113 | apply?: boolean 114 | ) { 115 | const target = source.getImportDeclaration( 116 | (dec) => dec.getModuleSpecifierValue() === moduleSpecifier 117 | ); 118 | 119 | target?.removeNamedImports(); 120 | 121 | target?.addNamedImports(namedImports); 122 | 123 | apply && source.saveSync(); 124 | } 125 | 126 | /** 127 | * Remove specific import declarations 128 | * @param source 129 | * @param specifier 130 | * @param apply 131 | */ 132 | export function removeImportDeclarations( 133 | source: SourceFile, 134 | specifier: string | string[], 135 | apply?: boolean 136 | ) { 137 | const specifierList = ensureArray(specifier); 138 | 139 | source 140 | .getImportDeclarations() 141 | .filter((imp) => specifierList.includes(imp.getModuleSpecifierValue())) 142 | .forEach((imp) => imp.remove()); 143 | 144 | apply && source.saveSync(); 145 | } 146 | 147 | /** 148 | * Append new named imports member 149 | * @param source Source 150 | * @param namedImports New named imports to append 151 | * @param moduleSpecifier The import declaration to operate 152 | * @param apply Should apply change directly to the source file 153 | */ 154 | export function appendNamedImportsMember( 155 | source: SourceFile, 156 | namedImports: string[], 157 | moduleSpecifier: string, 158 | apply?: boolean 159 | ) { 160 | const target = source.getImportDeclaration( 161 | (dec) => dec.getModuleSpecifierValue() === moduleSpecifier 162 | ); 163 | 164 | const existNamedImports = target 165 | ?.getNamedImports() 166 | .map((imp) => imp.getText()); 167 | 168 | const namedImportsToAppend = namedImports.filter( 169 | (imp) => !existNamedImports?.includes(imp) 170 | ); 171 | 172 | target?.addNamedImports(namedImportsToAppend); 173 | 174 | apply && source.saveSync(); 175 | } 176 | 177 | export enum ImportType { 178 | NAMESPACE_IMPORT = "NAMESPACE_IMPORT", 179 | NAMED_IMPORTS = "NAMED_IMPORTS", 180 | DEFAULT_IMPORT = "DEFAULT_IMPORT", 181 | } 182 | 183 | /** 184 | * Add a namespace import declaration in source file 185 | * @param source Source 186 | * @param namespace Namespace import 187 | * @param moduleSpecifier The import declaration to operate 188 | */ 189 | export function addImportDeclaration( 190 | source: SourceFile, 191 | namespace: string, 192 | moduleSpecifier: string, 193 | importType: ImportType.NAMESPACE_IMPORT, 194 | apply?: boolean 195 | ): void; 196 | 197 | /** 198 | * Add a named import declaration in source file 199 | * @param source Source 200 | * @param namedImports Named imports 201 | * @param moduleSpecifier The import declaration to operate 202 | */ 203 | export function addImportDeclaration( 204 | source: SourceFile, 205 | namedImports: string[], 206 | moduleSpecifier: string, 207 | importType: ImportType.NAMED_IMPORTS, 208 | apply?: boolean 209 | ): void; 210 | 211 | /** 212 | * Add a default import declaration in source file 213 | * @param source Source 214 | * @param defaultImport Default import 215 | * @param moduleSpecifier The import declaration to operate 216 | */ 217 | export function addImportDeclaration( 218 | source: SourceFile, 219 | defaultImport: string | undefined, 220 | moduleSpecifier: string, 221 | importType: ImportType.DEFAULT_IMPORT, 222 | apply?: 
boolean 223 | ): void; 224 | 225 | export function addImportDeclaration( 226 | source: SourceFile, 227 | importClause: string | undefined | string[], 228 | moduleSpecifier: string, 229 | importType: ImportType, 230 | apply?: boolean 231 | ) { 232 | switch (importType) { 233 | case ImportType.DEFAULT_IMPORT: 234 | source.addImportDeclaration({ 235 | defaultImport: importClause as string, 236 | moduleSpecifier, 237 | }); 238 | 239 | break; 240 | 241 | case ImportType.NAMED_IMPORTS: 242 | source.addImportDeclaration({ 243 | namedImports: ensureArray(importClause as string), 244 | moduleSpecifier, 245 | }); 246 | 247 | break; 248 | 249 | case ImportType.NAMESPACE_IMPORT: 250 | source.addImportDeclaration({ 251 | namespaceImport: importClause as string, 252 | moduleSpecifier: moduleSpecifier, 253 | }); 254 | 255 | break; 256 | } 257 | 258 | apply && source.saveSync(); 259 | } 260 | 261 | /** 262 | * Generate class declarations from list 263 | * @param source 264 | * @param list 265 | * @param apply 266 | */ 267 | export function classDeclarationGeneratorFromList( 268 | source: SourceFile, 269 | list: ClassInfo[], 270 | apply?: boolean 271 | ): void { 272 | list.forEach((classInfo) => source.addClass(classInfo)); 273 | apply && source.saveSync(); 274 | } 275 | 276 | /** 277 | * Remove class declarations by name list 278 | * @param source 279 | * @param names 280 | * @param apply 281 | */ 282 | export function removeClassDeclarations( 283 | source: SourceFile, 284 | names: string[], 285 | apply?: boolean 286 | ): void { 287 | source 288 | .getClasses() 289 | .filter((classDec) => names.includes(classDec.getName()!)) 290 | .forEach((classDec) => { 291 | classDec.remove(); 292 | }); 293 | 294 | apply && source.saveSync(); 295 | } 296 | 297 | /** 298 | * Add resolver related import, create resolver class, add buildSchemaSync 299 | * @param source 300 | * @param rootType 301 | * @returns 302 | */ 303 | export function createTmpResolverContent( 304 | source: SourceFile, 305 | checkerOptions: CheckerOptions, 306 | rootType: string 307 | ): { 308 | resolverClass: ClassDeclaration; 309 | buildSchemaStatements: Statement[]; 310 | } { 311 | addImportDeclaration( 312 | source, 313 | undefined, 314 | CHECKER_MODULE_SPECIFIER, 315 | ImportType.DEFAULT_IMPORT 316 | ); 317 | 318 | appendNamedImportsMember(source, CHECKER_IMPORTS, BASE_MODULE_SPECIFIER); 319 | 320 | const resolverClass = source.addClass({ 321 | name: "TmpResolver", 322 | isExported: true, 323 | decorators: [ 324 | { 325 | name: "Resolver", 326 | arguments: [`(type)=>${rootType}`], 327 | }, 328 | ], 329 | methods: [ 330 | { 331 | name: "TmpResolver", 332 | isAsync: true, 333 | statements: ["return [];"], 334 | decorators: [ 335 | { 336 | name: "Query", 337 | arguments: [`(type)=>[${rootType}]`], 338 | }, 339 | ], 340 | returnType: `Promise<${rootType}[]>`, 341 | }, 342 | ], 343 | }); 344 | 345 | // TODO: enhancement 346 | const buildSchemaStatements = source.addStatements([ 347 | ` 348 | buildSchemaSync({ 349 | resolvers: [TmpResolver], 350 | emitSchemaFile: ${checkerOptions.buildSchemaOptions.emitSchemaFile}, 351 | skipCheck: false, 352 | nullableByDefault: ${checkerOptions.buildSchemaOptions.nullableByDefault}, 353 | dateScalarMode: ${checkerOptions.buildSchemaOptions.dateScalarMode} 354 | });`, 355 | ]); 356 | 357 | source.saveSync(); 358 | 359 | return { 360 | resolverClass, 361 | buildSchemaStatements, 362 | }; 363 | } 364 | --------------------------------------------------------------------------------