├── .github ├── FUNDING.yml ├── actions │ └── setup │ │ └── action.yml ├── dependabot.yml └── workflows │ ├── release.yml │ └── check.yml ├── .gitignore ├── .vscode └── settings.json ├── code.png ├── .husky └── pre-receive ├── .npmignore ├── src ├── index.ts ├── semigroup.ts ├── empty.ts ├── common.ts └── faker.ts ├── .changeset ├── config.json └── README.md ├── tests ├── common.ts ├── faker.test.ts ├── empty.test.ts └── semigroup.test.ts ├── tsconfig.json ├── .eslintrc.cjs ├── LICENSE ├── CHANGELOG.md ├── package.json └── README.md /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [jessekelly881] -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/tsup 2 | node_modules 3 | .pnpm-debug.log 4 | dist 5 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "typescript.tsdk": "node_modules/typescript/lib" 3 | } -------------------------------------------------------------------------------- /code.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jessekelly881/effect-schema-compilers/HEAD/code.png -------------------------------------------------------------------------------- /.husky/pre-receive: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | . 
"$(dirname -- "$0")/_/husky.sh" 3 | 4 | pnpm lint 5 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | src 2 | test 3 | .husky 4 | .changeset 5 | .github 6 | pnpm-lock.yaml 7 | tsconfig.json 8 | vite.config.ts -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @since 1.0.0 3 | */ 4 | 5 | /** 6 | * @since 1.0.0 7 | */ 8 | export * as Empty from "./empty"; 9 | 10 | /** 11 | * @since 1.0.0 12 | */ 13 | export * as Faker from "./faker"; 14 | 15 | /** 16 | * @since 1.0.0 17 | */ 18 | export * as Semigroup from "./semigroup"; 19 | -------------------------------------------------------------------------------- /.changeset/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://unpkg.com/@changesets/config@2.3.0/schema.json", 3 | "changelog": "@changesets/cli/changelog", 4 | "commit": false, 5 | "fixed": [], 6 | "linked": [], 7 | "access": "public", 8 | "baseBranch": "main", 9 | "updateInternalDependencies": "patch", 10 | "ignore": [] 11 | } 12 | -------------------------------------------------------------------------------- /tests/common.ts: -------------------------------------------------------------------------------- 1 | import * as S from "@effect/schema/Schema"; 2 | 3 | export interface Category { 4 | readonly name: string; 5 | readonly subcategories: ReadonlyArray; 6 | } 7 | 8 | export const Category: S.Schema = S.suspend(() => 9 | S.struct({ 10 | name: S.string, 11 | subcategories: S.array(Category) 12 | }) 13 | ); 14 | 15 | export enum Fruits { 16 | Apple, 17 | Banana 18 | } 19 | -------------------------------------------------------------------------------- /tsconfig.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "noEmit": true, 4 | "target": "ESNext", 5 | "module": "ESNext", 6 | "noImplicitAny": true, 7 | "removeComments": true, 8 | "moduleResolution": "Bundler", 9 | "preserveConstEnums": true, 10 | "skipLibCheck": true 11 | }, 12 | "include": [ 13 | "src/**/*", 14 | "tests/**/*" 15 | ], 16 | "exclude": [ 17 | "node_modules" 18 | ], 19 | } 20 | -------------------------------------------------------------------------------- /.changeset/README.md: -------------------------------------------------------------------------------- 1 | # Changesets 2 | 3 | Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works 4 | with multi-package repos, or single-package repos to help you version and publish your code. You can 5 | find the full documentation for it [in our repository](https://github.com/changesets/changesets) 6 | 7 | We have a quick list of common questions to get you started engaging with this project in 8 | [our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md) 9 | -------------------------------------------------------------------------------- /.github/actions/setup/action.yml: -------------------------------------------------------------------------------- 1 | name: Setup 2 | description: Perform standard setup and install dependencies using pnpm. 
3 | inputs: 4 | node-version: 5 | description: The version of Node.js to install 6 | required: true 7 | default: 20.9.0 8 | 9 | runs: 10 | using: composite 11 | steps: 12 | - name: Install pnpm 13 | uses: pnpm/action-setup@v2 14 | with: 15 | version: 7 16 | 17 | - name: Install node 18 | uses: actions/setup-node@v3 19 | with: 20 | cache: pnpm 21 | node-version: ${{ inputs.node-version }} 22 | 23 | - name: Install dependencies 24 | shell: bash 25 | run: pnpm install --no-frozen-lockfile 26 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Please see the documentation for all configuration options: 2 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 3 | 4 | version: 2 5 | updates: 6 | 7 | # Check for updates to npm packages 8 | - package-ecosystem: "npm" 9 | directory: "/" 10 | schedule: 11 | interval: "weekly" 12 | groups: 13 | npm: 14 | patterns: 15 | - "*" 16 | 17 | # Check for updates to GitHub Actions 18 | - package-ecosystem: "github-actions" 19 | directory: "/" 20 | schedule: 21 | interval: "weekly" 22 | groups: 23 | github-actions: 24 | patterns: 25 | - "*" -------------------------------------------------------------------------------- /.eslintrc.cjs: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | env: { 3 | browser: true, 4 | es2021: true 5 | }, 6 | extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], 7 | overrides: [ 8 | { 9 | env: { 10 | node: true 11 | }, 12 | files: [".eslintrc.{js,cjs}"], 13 | parserOptions: { 14 | sourceType: "script" 15 | } 16 | } 17 | ], 18 | parser: "@typescript-eslint/parser", 19 | parserOptions: { 20 | ecmaVersion: "latest", 21 | sourceType: "module", 22 | project: "./tsconfig.json" 23 | }, 24 | plugins: ["@typescript-eslint", "deprecation"], 25 | 
rules: { 26 | "deprecation/deprecation": "warn", 27 | "@typescript-eslint/no-namespace": "off", 28 | "@typescript-eslint/no-explicit-any": "off", 29 | "no-mixed-spaces-and-tabs": "warn" 30 | } 31 | }; 32 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | push: 4 | branches: [main] 5 | 6 | concurrency: 7 | group: ${{ github.workflow }}-${{ github.ref }} 8 | 9 | jobs: 10 | release: 11 | name: Release 12 | runs-on: ubuntu-latest 13 | timeout-minutes: 10 14 | permissions: 15 | contents: write 16 | id-token: write 17 | pull-requests: write 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Install dependencies 21 | uses: ./.github/actions/setup 22 | - run: pnpm run build 23 | - name: Create Release Pull Request or Publish 24 | uses: changesets/action@v1 25 | with: 26 | publish: pnpm run publish 27 | env: 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License Copyright (c) 2023 Jesse Kelly 2 | 3 | Permission is hereby granted, free of 4 | charge, to any person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the Software without 6 | restriction, including without limitation the rights to use, copy, modify, merge, 7 | publish, distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to the 9 | following conditions: 10 | 11 | The above copyright notice and this permission notice 12 | (including the next paragraph) shall be included in all copies or substantial 13 | portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 17 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO 18 | EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR 19 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # @kellyjesse881/empty 2 | 3 | ## 0.0.23 4 | 5 | ### Patch Changes 6 | 7 | - 5514f5c: updated effect 8 | 9 | ## 0.0.22 10 | 11 | ### Patch Changes 12 | 13 | - 14ba807: removed json schema compiler 14 | - 14ba807: updated effect packages 15 | 16 | ## 0.0.21 17 | 18 | ### Patch Changes 19 | 20 | - 45ae860: updated package exports 21 | - f2f737e: added arbitrary object Equivalence 22 | - 7b390b4: faker: added support for Date 23 | 24 | ## 0.0.20 25 | 26 | ### Patch Changes 27 | 28 | - d3ac6e6: updated effect packages 29 | 30 | ## 0.0.19 31 | 32 | ### Patch Changes 33 | 34 | - acd9e85: updated effect packages 35 | 36 | ## 0.0.18 37 | 38 | ### Patch Changes 39 | 40 | - 90212cd: corrected compiler type signatures 41 | 42 | ## 0.0.17 43 | 44 | ### Patch Changes 45 | 46 | - 062ac5d: updated @effect deps 47 | 48 | ## 0.0.16 49 | 50 | ### Patch Changes 51 | 52 | - 5a510bb: feat: added json-schema compiler 53 | 54 | ## 0.0.15 55 | 56 | ### Patch Changes 57 | 58 | - ac19e0e: added support for array filters 59 | - 064e797: added support for bigint filters 60 | 61 | ## 0.0.14 62 | 63 | ### Patch Changes 64 | 65 | - 3756db4: added support for filters 66 | 67 | ## 0.0.13 68 | 69 | ### Patch Changes 70 | 71 | - 6ea24c7: added fakerjs 72 | 73 | ## 0.0.12 74 | 75 | ### Patch Changes 76 | 77 | - a223646: added 
fakerjs compiler 78 | - c762d84: made to, from lazy after supplying schema 79 | 80 | ## 0.0.11 81 | 82 | ### Patch Changes 83 | 84 | - e8fc86a: improved testing and lazy support 85 | 86 | ## 0.0.10 87 | 88 | ### Patch Changes 89 | 90 | - de7b08c: added semigroup compiler 91 | - 01a2d9b: added to,from compilers 92 | 93 | ## 0.0.9 94 | 95 | ### Patch Changes 96 | 97 | - e45ca97: fixed empty() type 98 | 99 | ## 0.0.8 100 | 101 | ### Patch Changes 102 | 103 | - 7cfa319: updated package.json 104 | 105 | ## 0.0.7 106 | 107 | ### Patch Changes 108 | 109 | - 79791a6: fixed gh action 110 | 111 | ## 0.0.6 112 | 113 | ### Patch Changes 114 | 115 | - 997c734: publish test 116 | 117 | ## 0.0.5 118 | 119 | ### Patch Changes 120 | 121 | - fb670fe: . 122 | 123 | ## 0.0.3 124 | 125 | ### Patch Changes 126 | 127 | - d94f379: init 128 | 129 | ## 0.0.4 130 | 131 | ### Patch Changes 132 | 133 | - 897cf90: fixed tsconfig 134 | 135 | ## 0.0.3 136 | 137 | ### Patch Changes 138 | 139 | - bea7d29: added .npmignore 140 | 141 | ## 0.0.2 142 | 143 | ### Patch Changes 144 | 145 | - cf3442c: init 146 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "effect-schema-compilers", 3 | "version": "0.0.23", 4 | "description": "", 5 | "type": "module", 6 | "homepage": "https://github.com/jessekelly881/effect-schema-compilers", 7 | "private": false, 8 | "files": [ 9 | "dist/**" 10 | ], 11 | "tags": [ 12 | "typescript", 13 | "schema", 14 | "validation" 15 | ], 16 | "keywords": [ 17 | "typescript", 18 | "schema", 19 | "validation" 20 | ], 21 | "main": "dist/index.js", 22 | "scripts": { 23 | "build": "tsup src/**/*.ts --format cjs,esm --dts", 24 | "lint": "eslint 'src/*.{ts,tsx}'", 25 | "dev": "vitest", 26 | "test": "vitest run", 27 | "ci": "pnpm run lint && pnpm run test && pnpm run build", 28 | "publish": "changeset publish", 29 | "check": "tsc -b 
tsconfig.json" 30 | }, 31 | "author": "Jesse Kelly", 32 | "license": "MIT", 33 | "devDependencies": { 34 | "@changesets/cli": "^2.27.1", 35 | "@effect/docgen": "^0.4.0", 36 | "@faker-js/faker": "^8.4.1", 37 | "@typescript-eslint/eslint-plugin": "^7.1.1", 38 | "@typescript-eslint/parser": "^7.1.1", 39 | "ajv": "^8.12.0", 40 | "build-utils": "^2.0.12", 41 | "eslint": "^8.57.0", 42 | "eslint-plugin-deprecation": "^2.0.0", 43 | "husky": "^9.0.11", 44 | "tsup": "^8.0.2", 45 | "vitest": "^1.4.0" 46 | }, 47 | "dependencies": { 48 | "@effect/schema": "^0.64.5", 49 | "@effect/typeclass": "^0.23.2", 50 | "effect": "2.4.15", 51 | "fast-check": "^3.16.0", 52 | "typescript": "^5.4.2" 53 | }, 54 | "exports": { 55 | ".": { 56 | "types": "./dist/index.d.ts", 57 | "module": "./dist/index.js", 58 | "import": "./dist/index.cjs", 59 | "default": "./dist/index.cjs" 60 | }, 61 | "./package.json": "./package.json", 62 | "./Empty": { 63 | "types": "./dist/empty.d.ts", 64 | "module": "./dist/empty.js", 65 | "import": "./dist/empty.cjs", 66 | "default": "./dist/empty.cjs" 67 | }, 68 | "./Equivalence": { 69 | "types": "./dist/equivalence.d.ts", 70 | "module": "./dist/equivalence.js", 71 | "import": "./dist/equivalence.cjs", 72 | "default": "./dist/equivalence.cjs" 73 | }, 74 | "./JsonSchema": { 75 | "types": "./dist/jsonSchema.d.ts", 76 | "module": "./dist/jsonSchema.js", 77 | "import": "./dist/jsonSchema.cjs", 78 | "default": "./dist/jsonSchema.cjs" 79 | }, 80 | "./Semigroup": { 81 | "types": "./dist/semigroup.d.ts", 82 | "module": "./dist/semigroup.js", 83 | "import": "./dist/semigroup.cjs", 84 | "default": "./dist/semigroup.cjs" 85 | }, 86 | "./Faker": { 87 | "types": "./dist/faker.d.ts", 88 | "module": "./dist/faker.js", 89 | "import": "./dist/faker.cjs", 90 | "default": "./dist/faker.cjs" 91 | } 92 | } 93 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 
effect-schema-compilers 2 | 3 | Compilers for @effect/schema. Currently in **alpha**; although, the api is pretty simple and probably won't change too much. For a collection of types preconfigured to work with these types see [effect-types](https://github.com/jessekelly881/effect-types) 4 | 5 | [![Npm package monthly downloads](https://badgen.net/npm/dm/effect-schema-compilers)](https://npmjs.com/package/effect-schema-compilers) 6 | 7 | ![img](code.png) 8 | 9 | ## Current TODOs 10 | 11 | - Convert to monorepo to allow supporting multiple compilers without dep issues. 12 | - Create compiler for avro 13 | 14 | ## Empty 15 | 16 | Generate "empty" values from a Schema. Similar to [zod-empty](https://github.com/toiroakr/zod-empty) with a similar motivation. 17 | 18 | ```ts 19 | import * as E from "effect-schema-compilers/dist/empty"; 20 | 21 | const s = E.to(S.struct({ num: S.number, str: S.string }))(); // { num: 0, str: "" } 22 | ``` 23 | 24 | Also supports setting the empty value for a schema. E.g. 25 | 26 | ```ts 27 | import * as E from "effect-schema-compilers/dist/empty"; 28 | import { pipe } from "@effect/data/Function"; 29 | 30 | const s = pipe(S.number, E.empty(() => 1), E.to()) // 1 31 | ``` 32 | 33 | ## Semigroup 34 | 35 | Generates a [Semigroup](https://effect-ts.github.io/data/modules/typeclass/Semigroup.ts.html) from the provided Schema. The default Semigroup.last is used which simply overrides the previous value. 36 | 37 | ```ts 38 | import * as S from "@effect/schema/Schema"; 39 | import * as _ from "effect-schema-compilers/dist/semigroup"; 40 | 41 | const schema = S.struct({ a: S.number, b: S.string }); 42 | const { combine } = _.to(schema)() 43 | expect(combine({ a: 0, b: "0" }, { a: 1, b: "1" })).toEqual({ a: 1, b: "1" }) 44 | ``` 45 | 46 | The semigroup for a Schema can be set using the semigroup() fn. 
For example, 47 | 48 | ```ts 49 | import * as S from "@effect/schema/Schema"; 50 | import * as Semi from "@effect/typeclass/Semigroup"' 51 | import * as _ from "effect-schema-compilers/dist/semigroup"; 52 | import { pipe } from "@effect/data/Function"; 53 | 54 | const schema = S.struct({ 55 | a: pipe(S.number, _.semigroup(Semi.min(n.Order))), 56 | b: pipe(S.string, _.semigroup(Semi.string)), 57 | c: S.boolean 58 | }); 59 | 60 | const { combine } = _.to(schema)() 61 | expect(combine({ a: 0, b: "0", c: true }, { a: 1, b: "1", c: false })).toEqual({ a: 0, b: "01", c: false }) 62 | ``` 63 | 64 | ## Fakerjs 65 | 66 | Generates realistic objects from a Schema using [fakerjs](@fakerjs/faker). 67 | 68 | ```ts 69 | import * as F from '@faker-js/faker'; 70 | import * as S from "@effect/schema/Schema" 71 | import { pipe } from "@effect/data/Function"; 72 | import * as _ from "effect-schema-compilers/dist/faker"; 73 | 74 | const Person = S.struct({ 75 | name: pipe(S.string, _.faker(f => f.person.fullName())), 76 | age: pipe(S.number, S.int(), S.greaterThanOrEqualTo(18), S.lessThanOrEqualTo(120)), 77 | sex: S.literal("male", "female") 78 | }); 79 | 80 | const fakeData = _.to(Person)(F.faker) // { name: "Seth Gottlieb", age: 36, sex: "male" } 81 | ``` 82 | -------------------------------------------------------------------------------- /.github/workflows/check.yml: -------------------------------------------------------------------------------- 1 | name: Check 2 | on: 3 | workflow_dispatch: 4 | pull_request: 5 | branches: [main] 6 | push: 7 | branches: [main] 8 | 9 | concurrency: 10 | group: ${{ github.workflow }}-${{ github.ref }} 11 | cancel-in-progress: true 12 | 13 | jobs: 14 | build: 15 | name: Build 16 | runs-on: ubuntu-latest 17 | timeout-minutes: 10 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Install dependencies 21 | uses: ./.github/actions/setup 22 | - run: pnpm build 23 | 24 | types: 25 | name: Types 26 | runs-on: ubuntu-latest 27 | timeout-minutes: 5 28 
| steps: 29 | - uses: actions/checkout@v4 30 | - name: Install dependencies 31 | uses: ./.github/actions/setup 32 | - run: pnpm check 33 | 34 | lint: 35 | name: Lint 36 | runs-on: ubuntu-latest 37 | timeout-minutes: 5 38 | steps: 39 | - uses: actions/checkout@v4 40 | - name: Install dependencies 41 | uses: ./.github/actions/setup 42 | - run: pnpm lint 43 | 44 | test: 45 | name: Test (${{ matrix.runtime }}) 46 | runs-on: ubuntu-latest 47 | timeout-minutes: 5 48 | strategy: 49 | fail-fast: false 50 | matrix: 51 | runtime: [Node, Bun] 52 | steps: 53 | - uses: actions/checkout@v4 54 | - name: Install dependencies 55 | uses: ./.github/actions/setup 56 | - uses: oven-sh/setup-bun@v2 57 | if: matrix.runtime == 'Bun' 58 | with: 59 | bun-version: 1.0.25 60 | - name: Test 61 | run: pnpm vitest 62 | if: matrix.runtime == 'Node' 63 | - name: Test 64 | run: bun vitest 65 | if: matrix.runtime == 'Bun' 66 | 67 | analyze: 68 | name: Analyze 69 | runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} 70 | timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} 71 | permissions: 72 | security-events: write 73 | actions: read 74 | contents: read 75 | 76 | strategy: 77 | fail-fast: false 78 | matrix: 79 | language: ["javascript-typescript"] 80 | 81 | steps: 82 | - name: Checkout repository 83 | uses: actions/checkout@v4 84 | - name: Initialize CodeQL 85 | uses: github/codeql-action/init@v3 86 | with: 87 | languages: ${{ matrix.language }} 88 | queries: security-extended 89 | 90 | - name: Autobuild 91 | uses: github/codeql-action/autobuild@v3 92 | - name: Perform CodeQL Analysis 93 | uses: github/codeql-action/analyze@v3 94 | with: 95 | category: "/language:${{matrix.language}}" 96 | -------------------------------------------------------------------------------- /src/semigroup.ts: -------------------------------------------------------------------------------- 1 | import * as AST from "@effect/schema/AST"; 2 | import * as S from 
"@effect/schema/Schema"; 3 | import * as Semi from "@effect/typeclass/Semigroup"; 4 | import * as O from "effect/Option"; 5 | import * as RA from "effect/ReadonlyArray"; 6 | import { createHookId, memoizeThunk } from "./common"; 7 | 8 | export const SemigroupHookId = createHookId("SemigroupHookId"); 9 | 10 | export const semigroup = ( 11 | semigroup: Semi.Semigroup 12 | ): ((self: S.Schema) => S.Schema) => 13 | S.annotations({ [SemigroupHookId]: semigroup }); 14 | 15 | const getAnnotation = 16 | AST.getAnnotation>(SemigroupHookId); 17 | 18 | interface Semigroup { 19 | (): Semi.Semigroup; 20 | } 21 | 22 | /** 23 | * @description 24 | * Generates a Semigroup from a given Schema. By default all values implement Semigroup.last so by default values are just overridden. 25 | */ 26 | export const make = (schema: S.Schema): Semigroup => 27 | go(schema.ast); 28 | 29 | const go = (ast: AST.AST): Semigroup => { 30 | const annotations = getAnnotation(ast); 31 | if (annotations._tag === "Some") { 32 | return () => annotations.value; 33 | } 34 | 35 | switch (ast._tag) { 36 | case "NeverKeyword": 37 | throw new Error("cannot build a Semigroup for `never`"); 38 | 39 | case "Literal": 40 | case "ObjectKeyword": 41 | case "BigIntKeyword": 42 | case "NumberKeyword": 43 | case "StringKeyword": 44 | case "BooleanKeyword": 45 | case "Enums": 46 | case "Union": 47 | case "SymbolKeyword": 48 | case "UniqueSymbol": 49 | case "UndefinedKeyword": 50 | case "UnknownKeyword": 51 | case "VoidKeyword": 52 | case "AnyKeyword": 53 | case "TemplateLiteral": 54 | case "Declaration": 55 | return () => Semi.last(); 56 | 57 | case "Refinement": 58 | return go(ast.from); 59 | case "Transformation": 60 | return go(ast.to); 61 | 62 | case "Suspend": { 63 | const get = memoizeThunk(() => go(ast.f())); 64 | return () => get()(); 65 | } 66 | 67 | case "TupleType": { 68 | const els = ast.elements.map((e) => go(e.type)); 69 | const rest = ast.rest; 70 | 71 | return () => 72 | Semi.make((self: [], that: []) => 
{ 73 | const output: any = []; 74 | 75 | const es = els.map((e) => e()); 76 | 77 | // elements 78 | for (let i = 0; i < es.length; i++) { 79 | const { combine } = es[i]; 80 | const result = combine(self[i], that[i]); 81 | output.push(result); 82 | } 83 | 84 | const values = RA.fromIterable(rest.values()); 85 | const tail = RA.tail(values).pipe( 86 | O.map((ts) => ts.map((e) => go(e))), 87 | O.getOrElse(() => []) 88 | ); 89 | const minLen = tail.length + els.length; // min len of tuple 90 | 91 | // rest head 92 | const thatRestLen = that.length - minLen; 93 | 94 | for (let h = 0; h < thatRestLen; h++) { 95 | output.push(that[els.length + h]); 96 | } 97 | 98 | // rest tail 99 | for (let t = 0; t < tail.length; t++) { 100 | const { combine } = tail[t](); 101 | const result = combine( 102 | self[self.length - tail.length + t], 103 | that[that.length - tail.length + t] 104 | ); 105 | output.push(result); 106 | } 107 | 108 | return output; 109 | }); 110 | } 111 | 112 | case "TypeLiteral": { 113 | const propertySignaturesTypes = ast.propertySignatures.map((f) => 114 | go(f.type) 115 | ); 116 | const output: any = {}; 117 | 118 | return () => { 119 | for (let i = 0; i < propertySignaturesTypes.length; i++) { 120 | const ps = ast.propertySignatures[i]; 121 | const name = ps.name; 122 | output[name] = propertySignaturesTypes[i](); 123 | } 124 | 125 | return Semi.struct(output); 126 | }; 127 | } 128 | } 129 | }; 130 | -------------------------------------------------------------------------------- /tests/faker.test.ts: -------------------------------------------------------------------------------- 1 | import * as S from "@effect/schema/Schema"; 2 | import * as F from "@faker-js/faker"; 3 | import { pipe } from "effect/Function"; 4 | import { describe, expect, it } from "vitest"; 5 | import * as _ from "../src/faker"; 6 | import { Category, Fruits } from "./common"; 7 | 8 | /** 9 | * Test a given schema 10 | */ 11 | const schema = (name: string, schema: S.Schema) => { 12 | 
it(name, () => { 13 | const fake = _.make(schema)(F.faker); 14 | // @ts-ignore 15 | S.asserts(schema)(fake); 16 | }); 17 | }; 18 | 19 | describe("faker", () => { 20 | schema("literal", S.literal("a", "b")); 21 | schema("boolean", S.boolean); 22 | schema("number", S.number); 23 | schema("bigint", S.bigint); 24 | schema("string", S.string); 25 | schema("string/ length", S.string.pipe(S.length(10))); 26 | schema("string/ min, max", S.string.pipe(S.minLength(30), S.maxLength(50))); 27 | schema("string/ pattern", pipe(S.string, S.pattern(/hello-[1-5]/))); 28 | schema("symbol", S.symbol); 29 | schema("union", S.union(S.number, S.string)); 30 | schema("record", S.record(S.string, S.number)); 31 | schema("enum", S.enums(Fruits)); 32 | schema("array", S.array(S.string)); 33 | schema("array/ itemsCount", pipe(S.array(S.string), S.itemsCount(10))); 34 | schema("lazy", Category); 35 | schema("Date", S.Date); 36 | schema( 37 | "DateFromSelf", 38 | S.DateFromSelf.pipe(_.faker((f) => f.date.recent())) 39 | ); 40 | 41 | schema("templateLiteral. a", S.templateLiteral(S.literal("a"))); 42 | schema("templateLiteral. ${string}", S.templateLiteral(S.string)); 43 | 44 | schema( 45 | "templateLiteral. a b", 46 | S.templateLiteral(S.literal("a"), S.literal(" "), S.literal("b")) 47 | ); 48 | 49 | schema( 50 | "templateLiteral. a${string}", 51 | S.templateLiteral(S.literal("a"), S.string) 52 | ); 53 | schema( 54 | "templateLiteral. a${string}b", 55 | S.templateLiteral(S.literal("a"), S.string, S.literal("b")) 56 | ); 57 | 58 | schema( 59 | "templateLiteral. 
a${string*}b", 60 | S.templateLiteral( 61 | S.literal("a"), 62 | S.string.pipe( 63 | _.faker((f) => f.string.alpha({ length: { min: 0, max: 10 } })) 64 | ), 65 | S.literal("b") 66 | ) 67 | ); 68 | 69 | schema("number/ int", S.number.pipe(S.int())); 70 | schema("number/ (0, 5)", S.number.pipe(S.greaterThan(0), S.lessThan(5))); 71 | 72 | schema( 73 | "number/ int (0, 5)", 74 | S.number.pipe(S.int(), S.greaterThan(0), S.lessThan(5)) 75 | ); 76 | 77 | schema( 78 | "number/ int [0, 5]", 79 | S.number.pipe( 80 | S.int(), 81 | S.greaterThanOrEqualTo(0), 82 | S.lessThanOrEqualTo(5) 83 | ) 84 | ); 85 | 86 | schema( 87 | "bigint/ (0, 5)", 88 | S.bigint.pipe(S.greaterThanBigint(0n), S.lessThanBigint(5n)) 89 | ); 90 | 91 | schema( 92 | "record. ", 93 | S.record( 94 | S.templateLiteral(S.literal("a"), S.string, S.literal("b")), 95 | S.number 96 | ) 97 | ); 98 | 99 | it("never", () => { 100 | expect(() => _.make(S.never)(F.faker)).toThrowError( 101 | new Error("cannot build a Faker for `never`") 102 | ); 103 | }); 104 | 105 | schema( 106 | "transform", 107 | pipe( 108 | S.string, 109 | S.transform( 110 | S.tuple(S.string), 111 | (s) => [s] as readonly [string], 112 | ([s]) => s 113 | ) 114 | ) 115 | ); 116 | 117 | schema("tuple", S.tuple([S.string, S.number], S.boolean, S.string)); 118 | 119 | schema( 120 | "struct", 121 | pipe( 122 | S.struct({ 123 | a: S.string, 124 | b: S.number, 125 | c: pipe(S.nonEmptyArray(S.number)), 126 | d: S.optional(S.boolean) 127 | }) 128 | ) 129 | ); 130 | 131 | schema( 132 | "struct - partial", 133 | S.partial(S.struct({ a: S.string, b: S.number })) 134 | ); 135 | 136 | schema( 137 | "struct - extra props", 138 | pipe( 139 | S.struct({ a: S.number, b: S.number }), 140 | S.extend(S.record(S.string, S.number)) 141 | ) 142 | ); 143 | 144 | it("example", () => { 145 | const Person = S.struct({ 146 | name: pipe( 147 | S.string, 148 | _.faker((f) => f.person.fullName()) 149 | ), 150 | age: pipe( 151 | S.number, 152 | S.int(), 153 | 
S.greaterThanOrEqualTo(18), 154 | S.lessThanOrEqualTo(120) 155 | ), 156 | sex: pipe(S.literal("male", "female")) 157 | }); 158 | 159 | F.faker.seed(25); 160 | const fakeData = _.make(Person)(F.faker); 161 | expect(fakeData).toEqual({ 162 | name: "Seth Gottlieb", 163 | age: 36, 164 | sex: "male" 165 | }); 166 | 167 | F.faker.seed(); // Unset seed for later tests 168 | }); 169 | }); 170 | -------------------------------------------------------------------------------- /src/empty.ts: -------------------------------------------------------------------------------- 1 | import * as AST from "@effect/schema/AST"; 2 | import * as S from "@effect/schema/Schema"; 3 | import * as O from "effect/Option"; 4 | import * as RA from "effect/ReadonlyArray"; 5 | import { 6 | Constraints, 7 | combineConstraints, 8 | createHookId, 9 | getConstraints 10 | } from "./common"; 11 | 12 | export const EmptyHookId = createHookId("EmptyHookId"); 13 | 14 | interface Empty { 15 | (): A; 16 | } 17 | 18 | export const empty = ( 19 | empty: () => E 20 | ): ((self: S.Schema) => S.Schema) => 21 | S.annotations({ [EmptyHookId]: empty }); 22 | 23 | const getAnnotation = AST.getAnnotation<() => unknown>(EmptyHookId); 24 | 25 | export const make = (schema: S.Schema): Empty => go(schema.ast); 26 | 27 | /** @internal */ 28 | const go = (ast: AST.AST, constraints?: Constraints): Empty => { 29 | const annotations = getAnnotation(ast); 30 | if (annotations._tag === "Some") { 31 | return annotations.value; 32 | } 33 | 34 | switch (ast._tag) { 35 | case "NeverKeyword": 36 | throw new Error("cannot build an Empty for `never`"); 37 | 38 | case "Literal": 39 | return () => ast.literal; 40 | case "ObjectKeyword": 41 | return () => ({}); 42 | case "TupleType": { 43 | const els = ast.elements 44 | .filter((e) => !e.isOptional) 45 | .map((e) => go(e.type)); 46 | const rest = ast.rest; 47 | 48 | let minItems = 0; 49 | if (constraints && constraints._tag === "ArrayConstraints") { 50 | if 
(constraints.constraints.minItems) 51 | minItems = constraints.constraints.minItems; 52 | } 53 | 54 | return () => { 55 | const values = RA.fromIterable(rest.values()); 56 | const head = RA.head(values).pipe(O.map(go)); 57 | const tail = RA.tail(values).pipe( 58 | O.map((asts) => asts.map((e) => go(e))), 59 | O.getOrElse(() => []) 60 | ); 61 | 62 | const requiredElsCount = els.length + tail.length; 63 | const minRestSize = Math.max(minItems - requiredElsCount, 0); 64 | 65 | const s = O.all(RA.range(1, minRestSize).map(() => head)).pipe( 66 | O.getOrElse(() => [] as Empty[]) 67 | ); 68 | const restEls = minRestSize > 0 ? s : []; 69 | 70 | return [ 71 | ...els.map((el) => el()), 72 | ...restEls.map((el) => el()), 73 | ...tail.map((el) => el()) 74 | ]; 75 | }; 76 | } 77 | case "BigIntKeyword": 78 | return () => { 79 | if (constraints && constraints._tag === "BigintConstraints") { 80 | if (constraints.constraints.min) 81 | return constraints.constraints.min; 82 | } 83 | 84 | return 0n; 85 | }; 86 | case "NumberKeyword": 87 | return () => { 88 | if (constraints && constraints._tag === "NumberConstraints") { 89 | if (constraints.constraints.min) 90 | return constraints.constraints.min; 91 | if (constraints.constraints.exclusiveMin) 92 | return constraints.constraints.isInt 93 | ? constraints.constraints.exclusiveMin + 1 94 | : constraints.constraints.exclusiveMin; 95 | } 96 | 97 | return 0; 98 | }; 99 | case "StringKeyword": 100 | return () => { 101 | return constraints && constraints._tag === "StringConstraints" 102 | ? constraints.constraints.minLength 103 | ? 
" ".repeat(constraints.constraints.minLength) 104 | : "" 105 | : ""; 106 | }; 107 | case "BooleanKeyword": 108 | return () => false; 109 | case "Refinement": 110 | return go( 111 | ast.from, 112 | combineConstraints(constraints, getConstraints(ast)) 113 | ); 114 | case "Transformation": 115 | return go( 116 | ast.to, 117 | combineConstraints(constraints, getConstraints(ast)) 118 | ); 119 | case "Declaration": 120 | throw new Error( 121 | `cannot build an Empty for a declaration without annotations (${ast})` 122 | ); 123 | case "Enums": 124 | return () => ast.enums[0][1]; 125 | case "Union": 126 | return go(ast.types[0]); // TODO: Pick the "simplest" value 127 | case "Suspend": 128 | return () => go(ast.f())(); 129 | case "TemplateLiteral": { 130 | const components = [ast.head]; 131 | for (const span of ast.spans) { 132 | components.push(span.literal); 133 | } 134 | return () => components.join(""); 135 | } 136 | case "SymbolKeyword": 137 | return () => Symbol(); 138 | case "UniqueSymbol": 139 | return () => ast.symbol; 140 | case "TypeLiteral": { 141 | const propertySignaturesTypes = ast.propertySignatures.map((f) => 142 | go(f.type) 143 | ); 144 | const output: any = {}; 145 | 146 | return () => { 147 | for (let i = 0; i < propertySignaturesTypes.length; i++) { 148 | const ps = ast.propertySignatures[i]; 149 | const name = ps.name; 150 | if (!ps.isOptional) { 151 | output[name] = propertySignaturesTypes[i](); 152 | } 153 | } 154 | 155 | return output; 156 | }; 157 | } 158 | case "UndefinedKeyword": 159 | case "UnknownKeyword": 160 | case "VoidKeyword": 161 | case "AnyKeyword": 162 | return () => undefined; 163 | } 164 | }; 165 | -------------------------------------------------------------------------------- /tests/empty.test.ts: -------------------------------------------------------------------------------- 1 | import { AST } from "@effect/schema"; 2 | import * as S from "@effect/schema/Schema"; 3 | import { pipe } from "effect/Function"; 4 | import { describe, 
expect, it } from "vitest"; 5 | import * as _ from "../src/empty"; 6 | import { Category, Fruits } from "./common"; 7 | 8 | const expectEmptyValue = (schema: S.Schema, value: A) => { 9 | const computed = _.make(schema)(); 10 | expect(computed).toEqual(value); 11 | }; 12 | 13 | describe("empty", () => { 14 | it("void", () => expectEmptyValue(S.void, undefined)); 15 | it("any", () => expectEmptyValue(S.any, undefined)); 16 | it("unknown", () => expectEmptyValue(S.unknown, undefined)); 17 | it("number", () => expectEmptyValue(S.number, 0)); 18 | it("bigint", () => expectEmptyValue(S.bigint, 0n)); 19 | it("string", () => expectEmptyValue(S.string, "")); 20 | it("boolean", () => expectEmptyValue(S.boolean, false)); 21 | it("enum", () => expectEmptyValue(S.enums(Fruits), Fruits.Apple)); 22 | it("literal", () => expectEmptyValue(S.literal("a", "b"), "a")); 23 | it("record", () => expectEmptyValue(S.record(S.string, S.number), {})); 24 | it("array", () => expectEmptyValue(S.array(S.string), [])); 25 | it("nonEmptyArray", () => 26 | expectEmptyValue(S.nonEmptyArray(S.string), [""])); 27 | it("object", () => expectEmptyValue(S.object, {})); 28 | 29 | it("templateLiteral. a", () => 30 | expectEmptyValue(S.templateLiteral(S.literal("a")), "a")); 31 | it("templateLiteral. ${string}", () => 32 | expectEmptyValue(S.templateLiteral(S.string), "")); 33 | it("templateLiteral. a${string}", () => 34 | expectEmptyValue(S.templateLiteral(S.literal("a"), S.string), "a")); 35 | it("templateLiteral. 
a${string}b", () => 36 | expectEmptyValue( 37 | S.templateLiteral(S.literal("a"), S.string, S.literal("b")), 38 | "ab" 39 | )); 40 | 41 | // filters 42 | it("number/ greaterThan", () => 43 | expectEmptyValue(pipe(S.number, S.greaterThan(4)), 4)); 44 | it("number/ greaterThanOrEqualTo", () => 45 | expectEmptyValue(pipe(S.number, S.greaterThanOrEqualTo(4)), 4)); 46 | it("number/ int, greaterThan", () => 47 | expectEmptyValue(pipe(S.number, S.int(), S.greaterThan(4)), 5)); 48 | it("bigint/ greaterThan", () => 49 | expectEmptyValue(pipe(S.bigint, S.greaterThanBigint(4n)), 5n)); 50 | it("bigint/ greaterThanOrEqualTo", () => 51 | expectEmptyValue(pipe(S.bigint, S.greaterThanOrEqualToBigint(4n)), 4n)); 52 | it("string/ minLength", () => 53 | expectEmptyValue(pipe(S.string, S.minLength(2)), " ")); 54 | it("array/ minItems", () => 55 | expectEmptyValue(pipe(S.array(S.string), S.minItems(2)), ["", ""])); 56 | 57 | it("ast", () => { 58 | const fn = () => ""; 59 | const ast = pipe(S.NumberFromString, _.empty(fn)).ast.annotations; 60 | expect(ast).toEqual({ 61 | [AST.IdentifierAnnotationId]: "NumberFromString", 62 | [_.EmptyHookId]: fn 63 | }); 64 | }); 65 | 66 | it("never", () => { 67 | expect(() => _.make(S.never)()).toThrowError( 68 | new Error("cannot build an Empty for `never`") 69 | ); 70 | }); 71 | 72 | it("custom", () => { 73 | const schema = pipe( 74 | S.number, 75 | _.empty(() => 1) 76 | ); 77 | expectEmptyValue(schema, 1); 78 | }); 79 | 80 | it("transform", () => { 81 | const schema = S.transform( 82 | S.string, 83 | S.tuple(S.string), 84 | (s) => [s] as readonly string[], 85 | ([s]) => s, 86 | { strict: false } 87 | ); 88 | 89 | expectEmptyValue(schema, [""]); 90 | }); 91 | 92 | it("uniqueSymbol", () => { 93 | const a = Symbol.for("test/a"); 94 | const schema = S.uniqueSymbolFromSelf(a); 95 | const emptyTo = _.make(schema)(); 96 | 97 | expect(emptyTo.toString()).toEqual(a.toString()); 98 | }); 99 | 100 | it("tuple/ e + r", () => { 101 | const schema = 
S.tuple([S.string, S.number], S.boolean); 102 | expectEmptyValue(schema, ["", 0]); 103 | }); 104 | 105 | it("tuple. e + e?", () => { 106 | const schema = S.tuple(S.string, S.optionalElement(S.number)); 107 | expectEmptyValue(schema, [""]); 108 | }); 109 | 110 | it("tuple. e? + r", () => { 111 | const schema = S.tuple([S.optionalElement(S.string)], S.number); 112 | expectEmptyValue(schema, []); 113 | }); 114 | 115 | it("tuple/ e + r + e", () => { 116 | const schema = S.tuple([S.string, S.number], S.boolean, S.string); 117 | expectEmptyValue(schema, ["", 0, ""]); 118 | }); 119 | 120 | it("struct", () => { 121 | const schema = pipe( 122 | S.struct({ 123 | a: S.string, 124 | b: S.number, 125 | c: S.array(S.number), 126 | d: S.optional(S.boolean), 127 | e: S.struct({ 128 | f: S.tuple(S.number, S.literal("literal")) 129 | }), 130 | g: pipe( 131 | S.string, 132 | _.empty(() => "/") 133 | ) 134 | }) 135 | ); 136 | const empty = _.make(schema)(); 137 | 138 | expect(empty).toEqual({ 139 | a: "", 140 | b: 0, 141 | c: [], 142 | e: { f: [0, "literal"] }, 143 | g: "/" 144 | }); 145 | }); 146 | 147 | it("struct - partial", () => { 148 | const schema = S.partial(S.struct({ a: S.string, b: S.number })); 149 | expectEmptyValue(schema, {}); 150 | }); 151 | 152 | it("union - discriminated", () => { 153 | const schema = S.union( 154 | S.struct({ type: S.literal("a"), a: S.string }), 155 | S.struct({ type: S.literal("b"), b: S.number }) 156 | ); 157 | 158 | expectEmptyValue(schema, { type: "a", a: "" }); 159 | }); 160 | 161 | it("symbol", () => { 162 | const schema = S.symbol; 163 | const empty = _.make(schema)(); 164 | 165 | expect(empty.toString()).toEqual(Symbol().toString()); 166 | }); 167 | 168 | it("lazy", () => { 169 | const schema = Category; 170 | const empty = _.make(schema)(); 171 | 172 | expect(empty).toEqual({ name: "", subcategories: [] }); 173 | }); 174 | }); 175 | -------------------------------------------------------------------------------- /tests/semigroup.test.ts: 
-------------------------------------------------------------------------------- 1 | import { AST } from "@effect/schema"; 2 | import * as A from "@effect/schema/Arbitrary"; 3 | import * as Eq from "@effect/schema/Equivalence"; 4 | import * as S from "@effect/schema/Schema"; 5 | import * as Semi from "@effect/typeclass/Semigroup"; 6 | import * as Boolean from "@effect/typeclass/data/Boolean"; 7 | import * as Number from "@effect/typeclass/data/Number"; 8 | import * as String from "@effect/typeclass/data/String"; 9 | import { pipe } from "effect/Function"; 10 | import * as n from "effect/Number"; 11 | import * as fc from "fast-check"; 12 | import { describe, expect, it } from "vitest"; 13 | import * as _ from "../src/semigroup"; 14 | import { Category, Fruits } from "./common"; 15 | 16 | /** 17 | * Tests that the generated Semigroup for a given Schema is a valid Semigroup 18 | */ 19 | const generatesValidSemigroup = (schema: S.Schema) => { 20 | const arb = A.make(schema)(fc); 21 | const eq = Eq.make(schema); 22 | const { combine } = _.make(schema)(); 23 | 24 | const associativity = fc.property(arb, arb, arb, (a, b, c) => 25 | eq(combine(combine(a, b), c), combine(a, combine(b, c))) 26 | ); 27 | 28 | fc.assert(associativity); 29 | }; 30 | 31 | describe("semigroup", () => { 32 | it("ast", () => { 33 | const ast = pipe(S.NumberFromString, _.semigroup(Number.SemigroupSum)) 34 | .ast.annotations; 35 | expect(ast).toEqual({ 36 | [_.SemigroupHookId]: Number.SemigroupSum, 37 | [AST.IdentifierAnnotationId]: "NumberFromString" 38 | }); 39 | }); 40 | 41 | it("never", () => { 42 | expect(() => _.make(S.never)()).toThrowError( 43 | new Error("cannot build a Semigroup for `never`") 44 | ); 45 | }); 46 | 47 | it("literal/ ", () => { 48 | const schema = S.literal("a", "b"); 49 | const { combine } = _.make(schema)(); 50 | 51 | generatesValidSemigroup(schema); 52 | expect(combine("a", "b")).toBe("b"); 53 | }); 54 | 55 | it("number/ ", () => { 56 | const schema = pipe(S.number, 
S.nonNaN());
        const { combine } = _.make(schema)();

        generatesValidSemigroup(schema);
        expect(combine(1, 2)).toBe(2);
    });

    it("number/ min", () => {
        const schema = pipe(
            S.number,
            S.nonNaN(),
            _.semigroup(Semi.min(n.Order))
        );
        const { combine } = _.make(schema)();

        generatesValidSemigroup(schema);
        expect(combine(1, 2)).toBe(1);
    });

    it("string/ ", () => {
        const schema = S.string;
        const { combine } = _.make(schema)();

        generatesValidSemigroup(schema);
        expect(combine("a", "b")).toBe("b");
    });

    it("string/ concat", () => {
        const schema = pipe(S.string, _.semigroup(String.Semigroup));
        const { combine } = _.make(schema)();

        generatesValidSemigroup(schema);
        expect(combine("a", "b")).toBe("ab");
    });

    // FIX: this test exercises S.Date but was titled "string/ ", duplicating
    // the title of the S.string test above and mislabelling failures in the
    // runner output.
    it("Date/ ", () => {
        const schema = S.Date;
        const { combine } = _.make(schema)();

        generatesValidSemigroup(schema);
        const now = new Date();
        expect(combine(now, now)).toBe(now);
    });

    it("enum/ ", () => {
        const schema = S.enums(Fruits);
        const { combine } = _.make(schema)();

        generatesValidSemigroup(schema);
        expect(combine(Fruits.Apple, Fruits.Banana)).toBe(Fruits.Banana);
    });

    it("tuple/ e + r + e", () => {
        const schema = S.tuple([S.string], S.boolean, S.NonNaN, S.boolean);
        const { combine } = _.make(schema)();

        generatesValidSemigroup(schema);
        expect(combine(["0", 0, false], ["1", 1, true])).toEqual([
            "1",
            1,
            true
        ]);
        expect(combine(["0", true, 0, false], ["1", 1, true])).toEqual([
            "1",
            1,
            true
        ]);
        expect(combine(["0", 0, false], ["1", true, 1, true])).toEqual([
            "1",
            true,
            1,
            true
        ]);
        expect(
            combine(
                ["0", true, true, true, true, 0, false],
                ["1", false, false, 1, true]
            )
).toEqual(["1", false, false, 1, true]); 135 | }); 136 | 137 | it("tuple/ [min, max]", () => { 138 | const A = pipe(S.number, S.nonNaN(), _.semigroup(Semi.min(n.Order))); 139 | const B = pipe(S.number, S.nonNaN(), _.semigroup(Semi.max(n.Order))); 140 | 141 | const schema = S.tuple(A, B); 142 | const { combine } = _.make(schema)(); 143 | 144 | generatesValidSemigroup(schema); 145 | expect(combine([0, 1], [1, 2])).toEqual([0, 2]); 146 | }); 147 | 148 | it("array/ ", () => { 149 | const schema = S.array(S.string); 150 | const { combine } = _.make(schema)(); 151 | 152 | generatesValidSemigroup(schema); 153 | expect(combine(["0", "1"], ["0", "1", "2"])).toEqual(["0", "1", "2"]); 154 | }); 155 | 156 | it("struct/ ", () => { 157 | const schema = S.struct({ 158 | a: pipe(S.number, S.nonNaN()), 159 | b: S.string, 160 | c: S.optional(S.boolean) 161 | }); 162 | const { combine } = _.make(schema)(); 163 | 164 | generatesValidSemigroup(schema); 165 | expect(combine({ a: 0, b: "0" }, { a: 1, b: "1" })).toEqual({ 166 | a: 1, 167 | b: "1" 168 | }); 169 | expect(combine({ a: 0, b: "0", c: true }, { a: 1, b: "1" })).toEqual({ 170 | a: 1, 171 | b: "1" 172 | }); 173 | expect(combine({ a: 0, b: "0" }, { a: 1, b: "1", c: true })).toEqual({ 174 | a: 1, 175 | b: "1", 176 | c: true 177 | }); 178 | expect( 179 | combine({ a: 0, b: "0", c: true }, { a: 1, b: "1", c: false }) 180 | ).toEqual({ a: 1, b: "1", c: false }); 181 | }); 182 | 183 | it("boolean/ ", () => { 184 | const schema = S.boolean; 185 | const { combine } = _.make(schema)(); 186 | 187 | generatesValidSemigroup(schema); 188 | expect(combine(true, false)).toEqual(false); 189 | }); 190 | 191 | it("boolean/ any", () => { 192 | const schema = pipe(S.boolean, _.semigroup(Boolean.SemigroupSome)); 193 | const { combine } = _.make(schema)(); 194 | 195 | generatesValidSemigroup(schema); 196 | expect(combine(true, false)).toEqual(true); 197 | }); 198 | 199 | it("struct/ [min, concat]", () => { 200 | const schema = S.struct({ 201 | a: 
pipe(S.number, S.nonNaN(), _.semigroup(Semi.min(n.Order))), 202 | b: pipe(S.string, _.semigroup(String.Semigroup)), 203 | c: S.boolean 204 | }); 205 | 206 | const { combine } = _.make(schema)(); 207 | 208 | generatesValidSemigroup(schema); 209 | expect( 210 | combine({ a: 0, b: "0", c: true }, { a: 1, b: "1", c: false }) 211 | ).toEqual({ a: 0, b: "01", c: false }); 212 | }); 213 | 214 | it("lazy", () => { 215 | const s = _.make(Category)(); 216 | const a: Category = { 217 | name: "a", 218 | subcategories: [{ name: "a1", subcategories: [] }] 219 | }; 220 | const b: Category = { name: "b", subcategories: [] }; 221 | 222 | expect(s.combine(a, b)).toEqual({ name: "b", subcategories: [] }); 223 | }); 224 | }); 225 | -------------------------------------------------------------------------------- /src/common.ts: -------------------------------------------------------------------------------- 1 | import * as AST from "@effect/schema/AST"; 2 | import * as S from "@effect/schema/Schema"; 3 | import { isNumber } from "effect/Predicate"; 4 | 5 | export const createHookId = (id: string) => 6 | Symbol(`effect-schema-compilers/${id}`); 7 | 8 | /** 9 | * TODO: Replace with import from "@effect/schema/internal/common" when working 10 | */ 11 | export const memoizeThunk = (f: () => A): (() => A) => { 12 | let done = false; 13 | let a: A; 14 | return () => { 15 | if (done) { 16 | return a; 17 | } 18 | a = f(); 19 | done = true; 20 | return a; 21 | }; 22 | }; 23 | 24 | class NumberConstraints { 25 | readonly _tag = "NumberConstraints"; 26 | constructor( 27 | readonly constraints: { 28 | min?: number; 29 | exclusiveMin?: number; 30 | exclusiveMax?: number; 31 | max?: number; 32 | isInt?: boolean; 33 | } 34 | ) {} 35 | } 36 | 37 | class BigintConstraints { 38 | readonly _tag = "BigintConstraints"; 39 | constructor( 40 | readonly constraints: { 41 | min?: bigint; 42 | max?: bigint; 43 | } 44 | ) {} 45 | } 46 | 47 | class StringConstraints { 48 | readonly _tag = "StringConstraints"; 49 
| constructor( 50 | readonly constraints: { 51 | minLength?: number; 52 | maxLength?: number; 53 | pattern?: RegExp; 54 | } 55 | ) {} 56 | } 57 | 58 | class ArrayConstraints { 59 | readonly _tag = "ArrayConstraints"; 60 | constructor( 61 | readonly constraints: { 62 | maxItems?: number; 63 | minItems?: number; 64 | } 65 | ) {} 66 | } 67 | 68 | export type Constraints = 69 | | NumberConstraints 70 | | StringConstraints 71 | | BigintConstraints 72 | | ArrayConstraints; 73 | 74 | // MultipleOfTypeId 75 | // MinItemsTypeId 76 | // MaxItemsTypeId 77 | // ItemsCountTypeId 78 | export const getConstraints = (ast: AST.AST): Constraints | undefined => { 79 | const TypeAnnotationId = ast.annotations[AST.TypeAnnotationId]; 80 | const jsonSchema: any = ast.annotations[AST.JSONSchemaAnnotationId]; 81 | 82 | switch (TypeAnnotationId) { 83 | // Number 84 | case S.GreaterThanTypeId: 85 | return new NumberConstraints({ 86 | exclusiveMin: jsonSchema.exclusiveMinimum 87 | }); 88 | case S.GreaterThanOrEqualToTypeId: 89 | return new NumberConstraints({ min: jsonSchema.minimum }); 90 | case S.LessThanTypeId: 91 | return new NumberConstraints({ 92 | exclusiveMax: jsonSchema.exclusiveMaximum 93 | }); 94 | case S.LessThanOrEqualToTypeId: 95 | return new NumberConstraints({ max: jsonSchema.maximum }); 96 | case S.IntTypeId: 97 | return new NumberConstraints({ isInt: true }); 98 | case S.BetweenTypeId: 99 | return new NumberConstraints({ 100 | min: jsonSchema.minimum, 101 | max: jsonSchema.maximum 102 | }); 103 | 104 | // Bigint 105 | case S.GreaterThanBigintTypeId: { 106 | const params: any = ast.annotations[TypeAnnotationId]; 107 | return new BigintConstraints({ 108 | min: params.min + 1n 109 | }); 110 | } 111 | case S.GreaterThanOrEqualToBigintTypeId: { 112 | const params: any = ast.annotations[TypeAnnotationId]; 113 | return new BigintConstraints({ 114 | min: params.min 115 | }); 116 | } 117 | case S.LessThanBigintTypeId: { 118 | const params: any = ast.annotations[TypeAnnotationId]; 119 
| return new BigintConstraints({ 120 | max: params.max 121 | }); 122 | } 123 | case S.LessThanOrEqualToBigintTypeId: { 124 | const params: any = ast.annotations[TypeAnnotationId]; 125 | return new BigintConstraints({ 126 | max: params.max - 1n 127 | }); 128 | } 129 | case S.BetweenBigintTypeId: { 130 | const params: any = ast.annotations[TypeAnnotationId]; 131 | return new BigintConstraints({ 132 | min: params.min, 133 | max: params.max 134 | }); 135 | } 136 | // String 137 | case S.LengthTypeId: 138 | return new StringConstraints({ 139 | minLength: jsonSchema.minLength, 140 | maxLength: jsonSchema.maxLength 141 | }); 142 | case S.MinLengthTypeId: 143 | return new StringConstraints({ minLength: jsonSchema.minLength }); 144 | case S.MaxLengthTypeId: 145 | return new StringConstraints({ maxLength: jsonSchema.maxLength }); 146 | case S.PatternTypeId: 147 | return new StringConstraints({ pattern: jsonSchema.pattern }); 148 | 149 | // Array 150 | case S.MaxItemsTypeId: 151 | return new ArrayConstraints({ maxItems: jsonSchema.maxItems }); 152 | case S.MinItemsTypeId: 153 | return new ArrayConstraints({ minItems: jsonSchema.minItems }); 154 | case S.ItemsCountTypeId: 155 | return new ArrayConstraints({ 156 | minItems: jsonSchema.minItems, 157 | maxItems: jsonSchema.maxItems 158 | }); 159 | } 160 | }; 161 | 162 | export const combineConstraints = ( 163 | c1: Constraints | undefined, 164 | c2: Constraints | undefined 165 | ): Constraints | undefined => { 166 | if (c1 === undefined) { 167 | return c2; 168 | } 169 | if (c2 === undefined) { 170 | return c1; 171 | } 172 | switch (c1._tag) { 173 | case "NumberConstraints": { 174 | switch (c2._tag) { 175 | case "NumberConstraints": { 176 | const out = new NumberConstraints({ 177 | ...(c1.constraints as NumberConstraints["constraints"]), 178 | ...(c2.constraints as NumberConstraints["constraints"]) 179 | }); 180 | 181 | const min = getMax(c1.constraints.min, c2.constraints.min); 182 | if (isNumber(min)) { 183 | out.constraints.min 
= min;
                    }
                    const max = getMin(c1.constraints.max, c2.constraints.max);
                    if (isNumber(max)) {
                        out.constraints.max = max;
                    }
                    return out;
                }
            }
            break;
        }

        case "BigintConstraints": {
            switch (c2._tag) {
                case "BigintConstraints": {
                    const out = new BigintConstraints({
                        ...(c1.constraints as BigintConstraints["constraints"]),
                        ...(c2.constraints as BigintConstraints["constraints"])
                    });

                    const min = getMaxBigint(
                        c1.constraints.min,
                        c2.constraints.min
                    );
                    // FIX: these guards previously used isNumber, which is
                    // always false for a bigint, so the tighter merged bound
                    // was silently discarded and the spread above (c2 wins)
                    // was left in effect.
                    if (typeof min === "bigint") {
                        out.constraints.min = min;
                    }
                    const max = getMinBigint(
                        c1.constraints.max,
                        c2.constraints.max
                    );
                    if (typeof max === "bigint") {
                        out.constraints.max = max;
                    }
                    return out;
                }
            }
            break;
        }

        case "StringConstraints": {
            switch (c2._tag) {
                case "StringConstraints": {
                    const out = new StringConstraints({
                        ...(c1.constraints as StringConstraints["constraints"]),
                        ...(c2.constraints as StringConstraints["constraints"])
                    });
                    const min = getMax(
                        c1.constraints.minLength,
                        c2.constraints.minLength
                    );
                    if (isNumber(min)) {
                        out.constraints.minLength = min;
                    }
                    const max = getMin(
                        c1.constraints.maxLength,
                        c2.constraints.maxLength
                    );
                    if (isNumber(max)) {
                        out.constraints.maxLength = max;
                    }
                    return out;
                }
            }
            break;
        }
    }
};

// Largest of two optional numbers (undefined = no bound).
const getMax = (
    n1: number | undefined,
    n2: number | undefined
): number | undefined =>
    n1 === undefined ? n2 : n2 === undefined ? n1 : Math.max(n1, n2);

// Smallest of two optional numbers (undefined = no bound).
const getMin = (
    n1: number | undefined,
    n2: number | undefined
): number | undefined =>
    n1 === undefined ? n2 : n2 === undefined ?
n1 : Math.min(n1, n2); 263 | 264 | const getMaxBigint = ( 265 | n1: bigint | undefined, 266 | n2: bigint | undefined 267 | ): bigint | undefined => 268 | n1 === undefined ? n2 : n2 === undefined ? n1 : n1 > n2 ? n1 : n2; 269 | 270 | const getMinBigint = ( 271 | n1: bigint | undefined, 272 | n2: bigint | undefined 273 | ): bigint | undefined => 274 | n1 === undefined ? n2 : n2 === undefined ? n1 : n1 > n2 ? n2 : n1; 275 | -------------------------------------------------------------------------------- /src/faker.ts: -------------------------------------------------------------------------------- 1 | import * as AST from "@effect/schema/AST"; 2 | import * as S from "@effect/schema/Schema"; 3 | import type * as F from "@faker-js/faker"; 4 | import * as O from "effect/Option"; 5 | import { isBigInt, isNumber } from "effect/Predicate"; 6 | import * as RA from "effect/ReadonlyArray"; 7 | import { 8 | Constraints, 9 | combineConstraints, 10 | createHookId, 11 | getConstraints, 12 | memoizeThunk 13 | } from "./common"; 14 | 15 | export const FakerHookId = createHookId("FakerHookId"); 16 | 17 | interface Faker { 18 | (faker: F.Faker): A; 19 | } 20 | 21 | export const faker = ( 22 | faker: Faker 23 | ): ((self: S.Schema) => S.Schema) => 24 | S.annotations({ [FakerHookId]: faker }); 25 | 26 | const getAnnotation = AST.getAnnotation>(FakerHookId); 27 | const getId = AST.getAnnotation(AST.IdentifierAnnotationId); 28 | 29 | export const make = (schema: S.Schema): Faker => go(schema.ast); 30 | 31 | const knownTypesMap: Record unknown> = { 32 | Date: (f: F.Faker) => f.date.recent() 33 | }; 34 | 35 | const getHook = 36 | AST.getAnnotation<(...args: ReadonlyArray>) => Faker>( 37 | FakerHookId 38 | ); 39 | 40 | /** 41 | * @param depthLimit - Used to limit recursion and only generate elements of limited depth 42 | */ 43 | const go = ( 44 | ast: AST.AST, 45 | depthLimit = 10, 46 | constraints?: Constraints 47 | ): Faker => { 48 | /** 49 | * Attempts to prevent reaching recursion limit 
by limiting object complexity when depth limit reached. 50 | * The recursion limit can still be reached as no attempt to limit recursion is made if doing so would produce an invalid instance of the type. 51 | */ 52 | const depthLimitReached = depthLimit <= 0; 53 | 54 | const annotations = getAnnotation(ast); 55 | if (annotations._tag === "Some") { 56 | return annotations.value; 57 | } 58 | 59 | // "Known" types that can be generated by faker 60 | const id = getId(ast); 61 | if (id._tag === "Some" && id.value in knownTypesMap) { 62 | return knownTypesMap[id.value]; 63 | } 64 | 65 | const hook = getHook(ast); 66 | if (O.isSome(hook)) { 67 | switch (ast._tag) { 68 | case "Declaration": 69 | return hook.value( 70 | ...ast.typeParameters.map((p) => 71 | go(p, depthLimit, getConstraints(ast)) 72 | ) 73 | ); 74 | default: 75 | return hook.value(); 76 | } 77 | } 78 | 79 | switch (ast._tag) { 80 | case "NeverKeyword": 81 | throw new Error("cannot build a Faker for `never`"); 82 | 83 | case "Refinement": 84 | return go( 85 | ast.from, 86 | depthLimit - 1, 87 | combineConstraints(constraints, getConstraints(ast)) 88 | ); 89 | case "Transformation": 90 | return go( 91 | ast.to, 92 | depthLimit - 1, 93 | combineConstraints(constraints, getConstraints(ast)) 94 | ); 95 | 96 | case "Declaration": { 97 | throw new Error( 98 | `cannot build an instance of Faker for a declaration without annotations (${ast})` 99 | ); 100 | } 101 | case "UndefinedKeyword": 102 | return () => undefined; 103 | 104 | case "ObjectKeyword": 105 | return () => ({}); 106 | case "Enums": 107 | return (f: F.Faker) => 108 | f.helpers.arrayElement(ast.enums.map((e) => e[1])); 109 | case "Literal": 110 | return (f: F.Faker) => f.helpers.arrayElement([ast.literal]); 111 | case "BooleanKeyword": 112 | return (f: F.Faker) => f.datatype.boolean(); 113 | case "NumberKeyword": 114 | return (f: F.Faker) => { 115 | if (constraints && constraints._tag === "NumberConstraints") { 116 | const c = constraints.constraints; 117 
const min =
                    c.min ?? c.exclusiveMin ?? Number.MIN_SAFE_INTEGER;
                const max =
                    c.max ?? c.exclusiveMax ?? Number.MAX_SAFE_INTEGER;

                const val = constraints.constraints.isInt
                    ? f.number.int({
                          // Exclusive bounds are stored separately for
                          // numbers; convert to inclusive here.
                          min: isNumber(c.exclusiveMin) ? min + 1 : min,
                          max: isNumber(c.exclusiveMax) ? max - 1 : max
                      })
                    // NOTE(review): for floats the exclusive bounds are passed
                    // through as inclusive min/max, so the boundary value can
                    // still be generated — confirm this is acceptable.
                    : f.number.float({ min, max });

                return val;
            }

            return f.number.float();
        };
    case "BigIntKeyword":
        return (f: F.Faker) => {
            if (constraints && constraints._tag === "BigintConstraints") {
                const c = constraints.constraints;

                // getConstraints already normalises bigint bounds to be
                // inclusive (e.g. GreaterThanBigint stores params.min + 1n).
                // FIX: the previous `c.min + 1n` / `c.max - 1n` here applied
                // that adjustment a second time, shifting the range by one
                // and excluding valid boundary values.
                const min = isBigInt(c.min) ? c.min : undefined;
                const max = isBigInt(c.max) ? c.max : undefined;

                return f.number.bigInt({ min, max });
            }
            return f.number.bigInt();
        };
    case "StringKeyword":
        return (f: F.Faker) => {
            const c = constraints;

            if (c && c._tag === "StringConstraints") {
                const min = c.constraints.minLength ?? 0;
                const max = c.constraints.maxLength;
                const pattern = c.constraints.pattern;

                return pattern
                    ?
f.helpers.fromRegExp(pattern) 163 | : f.string.sample({ min, max }); 164 | } 165 | 166 | return f.string.sample(); 167 | }; 168 | case "SymbolKeyword": 169 | return (f: F.Faker) => 170 | Symbol(f.string.alphanumeric({ length: { min: 0, max: 10 } })); 171 | case "UniqueSymbol": 172 | return (f: F.Faker) => 173 | Symbol(f.string.alphanumeric({ length: { min: 0, max: 10 } })); 174 | case "TemplateLiteral": { 175 | return (f: F.Faker) => { 176 | const components = [ast.head]; 177 | for (const span of ast.spans) { 178 | components.push(go(span.type, depthLimit - 1)(f)); 179 | components.push(span.literal); 180 | } 181 | return components.join(""); 182 | }; 183 | } 184 | case "Union": { 185 | const u = ast.types.map((t) => go(t, depthLimit - 1)); 186 | return (f: F.Faker) => f.helpers.arrayElement(u.map((el) => el(f))); 187 | } 188 | case "TupleType": { 189 | const els = ast.elements.map((e) => go(e.type, depthLimit - 1)); 190 | 191 | const values = RA.fromIterable(ast.rest.values()); 192 | const head = RA.head(values).pipe( 193 | O.map((ast) => go(ast, depthLimit - 1)) 194 | ); 195 | 196 | const tail = RA.tail(values).pipe( 197 | O.map((as) => as.map((e) => go(e, depthLimit - 1))) 198 | ); 199 | 200 | if (depthLimitReached) { 201 | return () => []; 202 | } 203 | 204 | return (f: F.Faker) => { 205 | let min = 0, 206 | max = 10; // default max, min 207 | const c = constraints; 208 | if (c && c._tag === "ArrayConstraints") { 209 | if (c.constraints.maxItems) { 210 | max = c.constraints.maxItems ?? max; 211 | } 212 | if (c.constraints.minItems) { 213 | min = c.constraints.minItems ?? min; 214 | } 215 | } 216 | 217 | const numToGen = f.number.int({ min, max }); 218 | const s = O.all(RA.range(1, numToGen).map(() => head)).pipe( 219 | O.getOrElse(() => [] as Faker[]) 220 | ); 221 | const restEls = numToGen > 0 ? 
s : []; 222 | 223 | const postRestEls = O.getOrElse(tail, () => []); 224 | 225 | const ret = [ 226 | ...els.map((el) => el(f)), 227 | ...restEls.map((el) => el(f)), 228 | ...postRestEls.map((el) => el(f)) 229 | ]; 230 | 231 | return ret; 232 | }; 233 | } 234 | case "Suspend": { 235 | const get = memoizeThunk(() => go(ast.f(), depthLimit - 1)); 236 | return (f) => get()(f); 237 | } 238 | case "TypeLiteral": { 239 | const propertySignaturesTypes = ast.propertySignatures.map((f) => 240 | go(f.type, depthLimit - 1) 241 | ); 242 | const indexSignatures = ast.indexSignatures.map( 243 | (is) => 244 | [ 245 | go(is.parameter, depthLimit - 1), 246 | go(is.type, depthLimit - 1) 247 | ] as const 248 | ); 249 | 250 | return (f: F.Faker) => { 251 | const output: any = {}; 252 | 253 | // handle property signatureIs 254 | for (let i = 0; i < propertySignaturesTypes.length; i++) { 255 | const ps = ast.propertySignatures[i]; 256 | const name = ps.name; 257 | 258 | // whether to include prop if it is optional 259 | const includeOptional = depthLimitReached 260 | ? false 261 | : f.datatype.boolean(); 262 | if (!ps.isOptional || includeOptional) { 263 | output[name] = propertySignaturesTypes[i](f); 264 | } 265 | } 266 | 267 | // index signatures 268 | for (let i = 0; i < indexSignatures.length; i++) { 269 | const parameter = indexSignatures[i][0](f); 270 | const type = indexSignatures[i][1](f); 271 | 272 | output[parameter] = type; 273 | } 274 | 275 | return output; 276 | }; 277 | } 278 | } 279 | 280 | throw new Error(`unhandled ${ast._tag}`); 281 | }; 282 | --------------------------------------------------------------------------------