├── .editorconfig
├── .env.local.example
├── .eslintrc.json
├── .github
│   ├── CODE_OF_CONDUCT.md
│   ├── CONTRIBUTING.md
│   ├── ISSUE_TEMPLATE
│   │   ├── BUG_REPORT.md
│   │   └── FEATURE_REQUEST.md
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── SECURITY.md
│   └── workflows
│       ├── build_pr_ubuntu.yml
│       └── build_pr_win.yml
├── .gitignore
├── .idea
│   ├── .gitignore
│   ├── lorado.iml
│   └── modules.xml
├── .node-version
├── .nvmrc
├── .prettierignore
├── .prettierrc.json
├── CHANGELOG.md
├── LICENSE
├── README.md
├── assets
│   ├── crop.png
│   ├── folder.png
│   ├── generations
│   │   ├── example-1.png
│   │   ├── example-10.png
│   │   ├── example-11.png
│   │   ├── example-2.png
│   │   ├── example-3.png
│   │   ├── example-4.png
│   │   ├── example-5.png
│   │   ├── example-6.png
│   │   ├── example-7.png
│   │   ├── example-8.png
│   │   └── example-9.png
│   ├── projects.png
│   ├── slideshow.png
│   └── ui.png
├── client
│   ├── components
│   │   ├── FaceDetectionImage.tsx
│   │   ├── FileUpload.tsx
│   │   ├── Header.tsx
│   │   ├── ImageItem.tsx
│   │   ├── ImageWithErrorHandling.tsx
│   │   ├── Layout.tsx
│   │   ├── Masonry.tsx
│   │   ├── ProjectsTable.tsx
│   │   ├── SlideshowModal.tsx
│   │   └── SynchedSliderInput.tsx
│   ├── data
│   │   ├── exampleImages.ts
│   │   └── kohyaConfig.ts
│   ├── services
│   │   └── prepare
│   │       ├── crop.ts
│   │       ├── prepare.ts
│   │       ├── reg.ts
│   │       ├── sizes.ts
│   │       └── utils.ts
│   ├── types.ts
│   └── utils
│       ├── samples.ts
│       └── traverseFileTree.ts
├── next.config.js
├── package-lock.json
├── package.json
├── pages
│   ├── _app.tsx
│   ├── _document.tsx
│   ├── api
│   │   ├── image
│   │   │   ├── delete
│   │   │   │   └── index.ts
│   │   │   ├── edit.ts
│   │   │   └── upload.ts
│   │   ├── prepare.ts
│   │   ├── projects
│   │   │   └── index.ts
│   │   └── uploads
│   │       └── [...args].ts
│   ├── index.tsx
│   ├── projects
│   │   └── [id].tsx
│   └── test
│       └── face-detect.tsx
├── public
│   ├── face-api
│   │   └── models
│   │       ├── age_gender_model-weights_manifest.json
│   │       ├── age_gender_model.bin
│   │       ├── face_expression_model-weights_manifest.json
│   │       ├── face_expression_model.bin
│   │       ├── face_landmark_68_model-weights_manifest.json
│   │       ├── face_landmark_68_model.bin
│   │       ├── face_landmark_68_tiny_model-weights_manifest.json
│   │       ├── face_landmark_68_tiny_model.bin
│   │       ├── face_recognition_model-weights_manifest.json
│   │       ├── face_recognition_model.bin
│   │       ├── ssd_mobilenetv1_model-weights_manifest.json
│   │       ├── ssd_mobilenetv1_model.bin
│   │       ├── tiny_face_detector_model-weights_manifest.json
│   │       └── tiny_face_detector_model.bin
│   ├── icons
│   │   ├── android-chrome-192x192.png
│   │   ├── android-chrome-512x512.png
│   │   ├── apple-touch-icon.png
│   │   ├── browserconfig.xml
│   │   ├── favicon-16x16.png
│   │   ├── favicon-32x32.png
│   │   ├── favicon.ico
│   │   ├── mstile-144x144.png
│   │   ├── mstile-150x150.png
│   │   ├── mstile-310x150.png
│   │   ├── mstile-310x310.png
│   │   └── mstile-70x70.png
│   └── images
│       └── anamnesis33
│           ├── example (1).jpg
│           ├── example (2).jpg
│           ├── example (3).jpg
│           └── example (4).jpg
├── training
│   └── .gitkeep
└── tsconfig.json
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | charset = utf-8
5 | indent_style = tab
6 | indent_size = 4
7 | max_line_length = 100
8 | end_of_line = lf
9 | trim_trailing_whitespace = true
10 | insert_final_newline = true
11 |
12 | # trailing spaces in markdown indicate word wrap
13 | [*.md]
14 | trim_trailing_whitespace = false
15 |
16 |
17 | [{*.json,*.md,*.yml,.*rc,.*config}]
18 | indent_style = space
19 |
20 | [{*.json,*.yml,.*rc,.*config}]
21 | indent_size = 2
22 |
--------------------------------------------------------------------------------
/.env.local.example:
--------------------------------------------------------------------------------
1 | UNSPLASH_ACCESS_KEY=
2 |
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "root": true,
3 | "parser": "@typescript-eslint/parser",
4 | "extends": ["typescript", "eslint:recommended", "xo", "plugin:prettier/recommended"],
5 | "env": {
6 | "browser": true,
7 | "es2022": true,
8 | "jest": true,
9 | "node": true
10 | },
11 | "globals": {
12 | "BufferEncoding": "readonly"
13 | },
14 | "plugins": ["@typescript-eslint", "unicorn", "unused-imports", "import", "jest", "prettier"],
15 | "ignorePatterns": ["*.d.ts"],
16 | "rules": {
17 | "import/order": [
18 | "error",
19 | {
20 | "alphabetize": {
21 | "order": "asc",
22 | "caseInsensitive": true
23 | },
24 | "newlines-between": "always"
25 | }
26 | ],
27 | "@typescript-eslint/no-explicit-any": "warn",
28 | "no-unused-vars": "off",
29 | "@typescript-eslint/no-unused-vars": "off",
30 | "unused-imports/no-unused-imports": "error",
31 | "unused-imports/no-unused-vars": [
32 | "warn",
33 | { "vars": "all", "varsIgnorePattern": "^_", "args": "after-used", "argsIgnorePattern": "^_" }
34 | ],
35 | "curly": "error",
36 | "no-nested-ternary": "error",
37 | "no-multiple-empty-lines": "error",
38 | "object-curly-spacing": ["error", "always"],
39 | "prettier/prettier": "error",
40 | "arrow-body-style": ["error", "as-needed"],
41 | "@typescript-eslint/consistent-type-imports": "warn",
42 | "no-await-in-loop": "warn"
43 | },
44 | "overrides": [
45 | {
46 | "files": "types.ts",
47 | "rules": {
48 | "no-unused-vars": "off"
49 | }
50 | }
51 | ]
52 | }
53 |
--------------------------------------------------------------------------------
/.github/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement.
63 |
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/.github/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | When contributing to this repository, please first discuss the change you wish to make via issue,
4 | email, or any other method with the owners of this repository before making a change.
5 |
6 | Please note we have a code of conduct, please follow it in all your interactions with the project.
7 |
8 | ## Pull Request Process
9 |
10 | Ensure that any temporary install or build dependencies are removed before the end of the build.
11 | Fork the repository and create a new branch (e.g. `feature/my-feature`). Commit changes following
12 | the "conventional-changelog" rules. Do not modify any versions manually and do not build new
13 | versions. Use the PULL_REQUEST_TEMPLATE.
14 |
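For reference, commit messages in this repository follow the conventional-changelog format; the
entries in [CHANGELOG.md](../CHANGELOG.md) are generated from such commits, for example:

```
feat(upload): implement better image loading
fix: rendering lag
```
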
15 | ## Reporting issues
16 |
17 | Create a new issue (e.g. `bug/some-bug`). Always list your "yarn version" and "node version".
18 | Use the ISSUE_TEMPLATE.
19 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/BUG_REPORT.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: "[BUG]: …"
5 | labels: ""
6 | assignees: ""
7 | ---
8 |
9 | **Describe the bug**
10 |
11 |
12 |
13 | **Expected behavior**
14 |
15 |
16 | **Screenshots**
17 |
18 |
19 | **Versions (please complete the following information):**
20 |
21 | - Browser: [e.g. Chrome 115.0.5790.171]
22 | - Node.js: [e.g. v18.14.2]
23 | - OS: [e.g. Windows 11]
24 | - LoRAdo: [e.g. 0.1.0]
25 |
26 | **Additional context**
27 |
28 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ""
5 | labels: ""
6 | assignees: ""
7 | ---
8 |
9 | **Is your feature request related to a problem? Please describe.** A clear and concise description
10 | of what the problem is. Ex. I'm always frustrated when [...]
11 |
12 | **Describe the solution you'd like** A clear and concise description of what you want to happen.
13 |
14 | **Describe alternatives you've considered** A clear and concise description of any alternative
15 | solutions or features you've considered.
16 |
17 | **Additional context** Add any other context or screenshots about the feature request here.
18 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Motivation
2 |
3 |
4 |
5 | ## Issues closed
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.github/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | Use this section to tell people about which versions of your project are currently being supported
6 | with security updates.
7 |
8 | | Version | Supported          |
9 | | ------- | ------------------ |
10 | | 0.x.x   | :white_check_mark: |
11 |
12 | ## Reporting a Vulnerability
13 |
14 | Use this section to tell people how to report a vulnerability.
15 |
16 | Tell them where to go, how often they can expect to get an update on a reported vulnerability, what
17 | to expect if the vulnerability is accepted or declined, etc.
18 |
--------------------------------------------------------------------------------
/.github/workflows/build_pr_ubuntu.yml:
--------------------------------------------------------------------------------
1 | name: Build Branch on Pull Request Ubuntu
2 |
3 | on:
4 |   pull_request:
5 |     types: [opened, synchronize, reopened]
6 |     branches:
7 |       - "**"
8 |   push:
9 |     branches:
10 |       - main
11 |
12 | jobs:
13 |   build-unix:
14 |     name: Run Build Ubuntu
15 |     runs-on: ubuntu-20.04
16 |
17 |     steps:
18 |       - uses: actions/checkout@v3
19 |
20 |       - uses: n1hility/cancel-previous-runs@v2
21 |         with:
22 |           token: ${{ secrets.GITHUB_TOKEN }}
23 |
24 |       - uses: actions/setup-node@v3
25 |         with:
26 |           node-version-file: '.nvmrc'
27 |
28 |       - name: Get npm cache directory path
29 |         id: npm-cache-dir-path
30 |         run: echo "dir=$(npm config get cache)" >> "$GITHUB_OUTPUT"
31 |
32 |       - uses: actions/cache@v3
33 |         id: cache
34 |         with:
35 |           path: |
36 |             ${{ steps.npm-cache-dir-path.outputs.dir }}
37 |           key: ${{ runner.os }}-${{ hashFiles('**/package-lock.json') }}
38 |
39 |       - run: npm ci
40 |
41 |       - run: npm run build
42 |
--------------------------------------------------------------------------------
/.github/workflows/build_pr_win.yml:
--------------------------------------------------------------------------------
1 | name: Build Branch on Pull Request Win
2 |
3 | on:
4 |   pull_request:
5 |     types: [opened, synchronize, reopened]
6 |     branches:
7 |       - "**"
8 |   push:
9 |     branches:
10 |       - main
11 |
12 | jobs:
13 |   build-win:
14 |     name: Run Build Win
15 |     runs-on: windows-2022
16 |
17 |     steps:
18 |       - uses: actions/checkout@v3
19 |
20 |       - uses: n1hility/cancel-previous-runs@v2
21 |         with:
22 |           token: ${{ secrets.GITHUB_TOKEN }}
23 |
24 |       - uses: actions/setup-node@v3
25 |         with:
26 |           node-version-file: '.nvmrc'
27 |
28 |       - name: Get npm cache directory path
29 |         id: npm-cache-dir-path
30 |         run: echo "dir=$(npm config get cache)" >> $env:GITHUB_OUTPUT
31 |
32 |       - uses: actions/cache@v3
33 |         id: cache
34 |         with:
35 |           path: |
36 |             ${{ steps.npm-cache-dir-path.outputs.dir }}
37 |           key: ${{ runner.os }}-${{ hashFiles('**/package-lock.json') }}
38 |
39 |       - run: npm ci
40 |
41 |       - run: npm run build
42 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | /venv/
4 | /data/
5 | __pycache__/
6 | /weights/
7 | /wandb/
8 |
9 | test_generation.ipynb
10 | # dependencies
11 | /node_modules
12 | /.pnp
13 | .pnp.js
14 |
15 | # testing
16 | /coverage
17 |
18 | # next.js
19 | /.next/
20 | /out/
21 |
22 | # production
23 | /build
24 |
25 | # misc
26 | .DS_Store
27 | *.pem
28 |
29 | # debug
30 | npm-debug.log*
31 | yarn-debug.log*
32 | yarn-error.log*
33 |
34 | # local env files
35 | .env*.local
36 |
37 | # vercel
38 | .vercel
39 |
40 | # typescript
41 | *.tsbuildinfo
42 | next-env.d.ts
43 |
44 | # intellij
45 | .idea
46 |
47 | # vscode
48 | .vscode/
49 |
50 | # training data
51 | /training/*
52 | !/training/.gitkeep
53 |
54 | # models
55 | /models/*/*
56 | !/models/*/.gitkeep
57 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 | # Editor-based HTTP Client requests
5 | /httpRequests/
6 |
--------------------------------------------------------------------------------
/.idea/lorado.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.node-version:
--------------------------------------------------------------------------------
1 | v18.5.0
2 |
--------------------------------------------------------------------------------
/.nvmrc:
--------------------------------------------------------------------------------
1 | v18.5.0
2 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | CHANGELOG.md
2 |
--------------------------------------------------------------------------------
/.prettierrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "trailingComma": "es5",
3 | "semi": true,
4 | "singleQuote": false,
5 | "quoteProps": "as-needed",
6 | "jsxSingleQuote": false,
7 | "bracketSpacing": true,
8 | "arrowParens": "avoid",
9 | "proseWrap": "always",
10 | "endOfLine":"auto"
11 | }
12 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # [0.1.0-alpha.8](https://github.com/failfa-st/LoRAdo/compare/v0.1.0-alpha.1...v0.1.0-alpha.8) (2023-08-18)
2 |
3 |
4 | ### Features
5 |
6 | * **upload:** implement better image loading ([#11](https://github.com/failfa-st/LoRAdo/issues/11)) ([1a8958a](https://github.com/failfa-st/LoRAdo/commit/1a8958aebddcc636e9b70347eb57ee21dd6fbc6c))
7 |
8 |
9 |
10 | # 0.1.0-alpha.7 (2023-08-17)
11 |
12 |
13 | ### Bug Fixes
14 |
15 | * EXDEV ([708ab9d](https://github.com/failfa-st/LoRAdo/commit/708ab9dfb7627b4118fb0ee646b9581222ae1214))
16 |
17 |
18 | # 0.1.0-alpha.6 (2023-08-17)
19 |
20 |
21 | ### Features
22 |
23 | * better masonry ([1c165ad](https://github.com/failfa-st/LoRAdo/commit/1c165adda2994b676fa990d0232bd1b25d1bb2af))
24 |
25 |
26 |
27 | # 0.1.0-alpha.5 (2023-08-17)
28 |
29 |
30 | ### Features
31 |
32 | * add support for SD1.5 ([c087459](https://github.com/failfa-st/LoRAdo/commit/c0874596f3e745d88701bd2a9099d954ccd95a78))
33 |
34 |
35 | # 0.1.0-alpha.4 (2023-08-17)
36 |
37 |
38 | ### Bug Fixes
39 |
40 | * rendering lag ([1082bc2](https://github.com/failfa-st/LoRAdo/commit/1082bc27405f5202e9daa0084156fa73d8f4a34d))
41 |
42 | # 0.1.0-alpha.3 (2023-08-16)
43 |
44 |
45 | ### Features
46 |
47 | * ad image uploading ([a93fbdc](https://github.com/failfa-st/LoRAdo/commit/a93fbdcfdf8877739c42eb4ed4e2ec467013d0e0))
48 | * add dataset creation ([a486936](https://github.com/failfa-st/LoRAdo/commit/a4869368502c49cc3766c245f06dad084a4ef0f8))
49 | * add sample prompts ([9b4efb7](https://github.com/failfa-st/LoRAdo/commit/9b4efb7839d8fb9de11818cffeeb4d1d7cf6994d))
50 | * allow creating new captions ([08c5810](https://github.com/failfa-st/LoRAdo/commit/08c5810b6b06b1db70f0147ecbe8df6a1557483c))
51 | * basic UI ([686e438](https://github.com/failfa-st/LoRAdo/commit/686e43844e25d33606d470307dc91465a6032e21))
52 | * calculate repeats ([c0d8300](https://github.com/failfa-st/LoRAdo/commit/c0d83002b12df579bd7eee946416f1ad4d9d8ae7))
53 | * projects ([f07bc41](https://github.com/failfa-st/LoRAdo/commit/f07bc41e2d0df8d036c6734eead8e37cd1de2954))
54 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # LoRAdo
2 |
3 | [Join our Discord](https://discord.com/invite/m3TBB9XEkb)
4 |
5 | Prepare datasets for [Kohya_ss](https://github.com/bmaltais/kohya_ss/), a UI for
6 | [kohya-ss/sd-scripts](https://github.com/kohya-ss/sd-scripts).
7 |
8 | > We plan to integrate training directly into this tool
9 |
10 |
11 |
12 | - [Intro](#intro)
13 | - [Why?](#why)
14 | - [Screenshots](#screenshots)
15 | - [Features](#features)
16 | - [Getting Started with LoRAdo](#getting-started-with-lorado)
17 | - [Update LoRAdo](#update-lorado)
18 | - [Using LoRAdo](#using-lorado)
19 | - [How LoRAdo Works](#how-lorado-works)
20 | - [Generation examples](#generation-examples)
21 |
22 |
23 |
24 | ## Intro
25 |
26 | Welcome to LoRAdo. This toolkit is designed to streamline the creation of LoRA training
27 | datasets. Suitable for both beginners and experienced developers, it simplifies gathering,
28 | cropping, and captioning images. Initially designed with portraits in mind, LoRAdo is
29 | adaptable and works well for various subjects. Key features include face detection and
30 | user-friendly cropping tools, ensuring a practical balance between user experience and results.
31 |
32 | With LoRAdo, creating datasets for image training is straightforward and efficient.
33 |
34 | ## Why?
35 |
36 | We wanted to provide an easy way to train LoRAs for different use cases. During research, we decided
37 | to build a few small helper functions to help us gather images and crop them as well as create
38 | caption files. This UI offers an easy way to create LoRAs without overthinking it.
39 |
40 | ## Screenshots
41 |
42 | ![The user interface](assets/ui.png)
43 |
44 | ![View and configure images](assets/crop.png)
45 |
46 | ![Project view](assets/projects.png)
47 |
48 | ![The prepared folder](assets/folder.png)
49 |
72 | ## Features
73 |
74 | - Dataset creation
75 | - Captioning tools
76 | - Create and edit projects
77 | - Support for SDXL and SD-1.5
78 | - SDXL or SD-1.5 resolution adjustment
79 | - Multiple image resolutions
80 | - [Kohya_ss (UI)](https://github.com/bmaltais/kohya_ss/) config creator
81 | - Regularisation image tools (WIP)
82 |
83 | ## Getting Started with LoRAdo
84 |
85 | 1. **Prerequisites:**
86 |
87 | - Ensure you have Node.js `v18.5.0` installed. If not, you can download it from the
88 | [Node.js official site](https://nodejs.org/), or [use a Node version manager](#node-version-managers) to quickly switch to a different version.
89 | - There is an open issue with TensorFlow. Please make sure to install Node.js v18 (not above).
90 | The recommended version can be found in [.nvmrc](.nvmrc).
91 | - If there is a problem with TensorFlow, please check
92 | [this issue](https://github.com/failfa-st/LoRAdo/issues/1).
93 |
94 | 2. **Clone and Install Dependencies:**
95 |
96 | ```bash
97 | git clone https://github.com/failfa-st/LoRAdo.git
98 | cd LoRAdo
99 | npm install
100 | ```
101 |
102 | 3. **Running the App:**
103 |
104 | ```bash
105 | npm run build
106 | npm run start
107 | ```
108 |
109 | Your app should now be running on [http://localhost:3000](http://localhost:3000). Navigate to
110 | this URL in your browser to explore the LoRAdo UI.
111 |
112 | ## Update LoRAdo
113 |
114 | 1. **Stop LoRAdo if it's already running**
115 |
116 | - CTRL+C to terminate the process in the terminal
117 |
118 | 2. **Update the code and dependencies:**
119 |
120 | ```bash
121 | git fetch origin
122 | git reset --hard origin/main
123 | npm install
124 | ```
125 |
126 | 3. **Run the App:**
127 |
128 | - Execute the commands as shown under _Running the App_ above
129 |
130 | ## Using LoRAdo
131 |
132 | 1. **Step 1:** Begin by uploading your chosen image(s).
133 | 2. **Step 2:** Let LoRAdo's face detection feature scan the image.
134 | - If a face is detected, the system identifies it as a potential focal point for cropping.
135 | - If no face is detected, evaluate if the image is suitable for your needs.
136 | 3. **Step 3:** Add captions to your images directly within the app, enhancing the context and
137 | meaning of each.
138 | 4. **Step 4:** Choose your cropping method:
139 | - Opt for minimal cropping by finding the best-fitting resolution based on the original aspect
140 | ratio, while still considering the detected focal point.
141 | - Or generate 9 SDXL-compatible crops (3 for SD-1.5) of each image.
142 | 5. **Step 5:** With a single click on "prepare", transform your curated images into a dataset
143 | compatible with [Kohya_ss](https://github.com/bmaltais/kohya_ss/).
144 | 6. **Step 6:** Navigate to the [training folder](training) (see the example layout below):
145 | - Every dataset here comes with its own `config.json` file.
146 | - These configurations are primed for integration into Kohya_ss.
147 |
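As an illustration, a prepared project inside the training folder might look roughly like this
(the names here are made up; the numeric folder prefix is the repeats value that LoRAdo
calculates, in the `<repeats>_<name>` layout that Kohya_ss expects):

```
training/
└── my-project/
    ├── config.json         # load this under the LoRA tab in Kohya_ss
    └── img/
        └── 5_my-project/   # "<repeats>_<name>" image folder
            ├── image-1.png
            └── image-1.txt # caption file
```
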
148 | ## How LoRAdo Works
149 |
150 | At the heart of LoRAdo is the intent to simplify the complex.
151 |
152 | 1. **Image Optimization:** Start with any image. LoRAdo smartly crops it to fit the ideal SDXL
153 | resolution. For images with faces, the face becomes the focal point, ensuring the best visual
154 | result. Without a face? No worries, we center-crop it perfectly (see the sketch after this list).
155 |
156 | 2. **Multiple Resolutions:** Beyond the default, you have the option to generate images in all 9
157 | SDXL resolutions, increasing flexibility without compromising on quality.
158 |
159 | 3. **Configuration Made Easy:** LoRAdo generates a configuration file tailored for Kohya_ss. Once
160 | your project is set, integrating it with Kohya_ss is a breeze. Simply load the config under the
161 | LoRA tab, and you're all set to begin training. And if you wish, Kohya_ss offers more granular
162 | configuration options.
163 |
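The crop selection behind step 1 lives in `client/services/prepare/crop.ts` (included further
down in this export). The following TypeScript sketch shows the minimal-zoom case in isolation;
the function name, the `Box` shape, and the boundary clamping are illustrative rather than the
exact exports:

```ts
interface Box {
	left: number;
	top: number;
	width: number;
	height: number;
}

// Pick the largest crop with the target aspect ratio, centered on the
// detected face (or on the image center when no face was found).
function selectCropBox(
	image: { width: number; height: number },
	target: { width: number; height: number },
	focalPoint?: { x: number; y: number }
): Box {
	const arImage = image.width / image.height;
	const arTarget = target.width / target.height;

	let width: number;
	let height: number;
	if (arImage > arTarget) {
		// Image is wider than the target: keep the full height, crop the sides.
		height = image.height;
		width = height * arTarget;
	} else {
		// Image is taller: keep the full width, crop top and bottom.
		width = image.width;
		height = width / arTarget;
	}

	const cx = focalPoint?.x ?? image.width / 2;
	const cy = focalPoint?.y ?? image.height / 2;

	// Clamp so the crop box never leaves the image bounds.
	const left = Math.min(Math.max(cx - width / 2, 0), image.width - width);
	const top = Math.min(Math.max(cy - height / 2, 0), image.height - height);

	return { left, top, width, height };
}
```

Because only aspect ratios are compared, pixels are removed rather than scaled, which keeps the
remaining image sharp.
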
164 | While our approach is guided by a strong opinion on usability, it still leaves room for
165 | customization. As we evolve, expect enhanced configurability. But rest assured, simplicity will
166 | always be our guiding star, ensuring that interacting with LoRAdo remains intuitive.
167 |
168 | Example images via [@anamnesis33](https://unsplash.com/@anamnesis33)
169 |
170 | | Image                                                          | Link                                    |
171 | | -------------------------------------------------------------- | --------------------------------------- |
172 | | ![Example 1](public/images/anamnesis33/example%20%281%29.jpg) | https://unsplash.com/photos/mqcYKihgfAo |
173 | | ![Example 2](public/images/anamnesis33/example%20%282%29.jpg) | https://unsplash.com/photos/06TuQM7RSP4 |
174 | | ![Example 3](public/images/anamnesis33/example%20%283%29.jpg) | https://unsplash.com/photos/AUJhl146mBY |
175 | | ![Example 4](public/images/anamnesis33/example%20%284%29.jpg) | https://unsplash.com/photos/8OWttYqN47I |
176 |
177 | This tool provides an opinionated configuration and approach to training flexible LoRAs. We are
178 | constantly researching to improve the default settings provided by this tool. Advanced (detailed)
179 | configuration is planned for future releases.
180 |
181 | ## Generation examples
182 |
183 | These examples were generated from a LoRA that was trained on a dataset prepared with this
184 | tool/approach:
185 |
186 | [![example-1](assets/generations/example-1.png)](assets/generations/example-1.png)
187 | [![example-2](assets/generations/example-2.png)](assets/generations/example-2.png)
188 | [![example-3](assets/generations/example-3.png)](assets/generations/example-3.png)
189 | [![example-4](assets/generations/example-4.png)](assets/generations/example-4.png)
190 | [![example-6](assets/generations/example-6.png)](assets/generations/example-6.png)
191 | [![example-7](assets/generations/example-7.png)](assets/generations/example-7.png)
192 | [![example-8](assets/generations/example-8.png)](assets/generations/example-8.png)
193 | [![example-9](assets/generations/example-9.png)](assets/generations/example-9.png)
194 | [![example-10](assets/generations/example-10.png)](assets/generations/example-10.png)
195 | [![example-11](assets/generations/example-11.png)](assets/generations/example-11.png)
196 |
197 | **Feedback and Support:** Encountered an issue or have a suggestion? Join our
198 | [Discord community](https://discord.com/invite/m3TBB9XEkb) or open an issue on GitHub. We'd love to
199 | hear from you!
200 |
201 | ## Node Version Managers
202 |
203 | Node.js has a [list of alternative installation options](https://nodejs.org/en/download/package-manager#nvm).
204 |
205 | **Cross Platform**
206 |
207 | - [fnm](https://github.com/Schniz/fnm)
208 |
209 | **Windows**
210 |
211 | - [nvm-windows](https://github.com/coreybutler/nvm-windows)
212 | - [nodist](https://github.com/marcelklehr/nodist)
213 | - [nvs](https://github.com/jasongin/nvs)
214 |
215 | **macOS/Linux**
216 |
217 | - [nvm](https://github.com/nvm-sh/nvm)
218 | - [n](https://github.com/tj/n)
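
For example, with `nvm` you can install and activate the version pinned in [.nvmrc](.nvmrc)
directly from the repository root:

```bash
nvm install # reads the pinned version from .nvmrc
nvm use     # activates it for the current shell
```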
219 |
220 |
--------------------------------------------------------------------------------
/assets/crop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/crop.png
--------------------------------------------------------------------------------
/assets/folder.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/folder.png
--------------------------------------------------------------------------------
/assets/generations/example-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-1.png
--------------------------------------------------------------------------------
/assets/generations/example-10.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-10.png
--------------------------------------------------------------------------------
/assets/generations/example-11.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-11.png
--------------------------------------------------------------------------------
/assets/generations/example-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-2.png
--------------------------------------------------------------------------------
/assets/generations/example-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-3.png
--------------------------------------------------------------------------------
/assets/generations/example-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-4.png
--------------------------------------------------------------------------------
/assets/generations/example-5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-5.png
--------------------------------------------------------------------------------
/assets/generations/example-6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-6.png
--------------------------------------------------------------------------------
/assets/generations/example-7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-7.png
--------------------------------------------------------------------------------
/assets/generations/example-8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-8.png
--------------------------------------------------------------------------------
/assets/generations/example-9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-9.png
--------------------------------------------------------------------------------
/assets/projects.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/projects.png
--------------------------------------------------------------------------------
/assets/slideshow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/slideshow.png
--------------------------------------------------------------------------------
/assets/ui.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/ui.png
--------------------------------------------------------------------------------
/client/components/FaceDetectionImage.tsx:
--------------------------------------------------------------------------------
1 | import { Box } from "@mui/joy";
2 | import * as faceapi from "@vladmandic/face-api/dist/face-api.esm-nobundle.js";
3 | import type { ImageProps } from "next/image";
4 | import Image from "next/image";
5 | import { useEffect, useRef, useState } from "react";
6 |
7 | import type { FaceBox } from "@/types";
8 |
9 | export default function FaceDetectionImage({
10 | alt,
11 | onFace,
12 | noDetection,
13 | faceBox,
14 | ...props
15 | }: ImageProps & { onFace?(hasFace: FaceBox): void; noDetection?: boolean; faceBox?: FaceBox }) {
16 | const [box, setBox] = useState(faceBox);
17 | const [modelsLoaded, setModelsLoaded] = useState(false);
18 | const [imageLoaded, setImageLoaded] = useState(false);
19 | const imgRef = useRef(null);
20 |
21 | useEffect(() => {
22 | if (!noDetection) {
23 | const loadModels = async () => {
24 | const MODEL_URL = "/face-api/models";
25 | await faceapi.nets.tinyFaceDetector.load(MODEL_URL);
26 | await faceapi.nets.faceLandmark68Net.load(MODEL_URL);
27 | await faceapi.nets.faceRecognitionNet.load(MODEL_URL);
28 | };
29 |
30 | loadModels().then(() => {
31 | setModelsLoaded(true);
32 | });
33 | }
34 | }, [noDetection]);
35 |
36 | useEffect(() => {
37 | const detectFace = async () => {
38 | if (imgRef.current && modelsLoaded && imageLoaded) {
39 | const detections = await faceapi.detectAllFaces(
40 | imgRef.current,
41 | new faceapi.TinyFaceDetectorOptions()
42 | );
43 | if (detections.length > 0) {
44 | const detectionBox = detections[0].box;
45 |
46 | // Use naturalWidth and naturalHeight to get original image dimensions
47 | const xPercentage = (detectionBox.x / imgRef.current.naturalWidth) * 100;
48 | const yPercentage = (detectionBox.y / imgRef.current.naturalHeight) * 100;
49 | const widthPercentage =
50 | (detectionBox.width / imgRef.current.naturalWidth) * 100;
51 | const heightPercentage =
52 | (detectionBox.height / imgRef.current.naturalHeight) * 100;
53 |
54 | setBox({
55 | xPercentage,
56 | yPercentage,
57 | widthPercentage,
58 | heightPercentage,
59 | });
60 | if (onFace) {
61 | onFace({
62 | xPercentage,
63 | yPercentage,
64 | widthPercentage,
65 | heightPercentage,
66 | });
67 | }
68 | } else if (onFace) {
69 | setBox({
70 | xPercentage: 10,
71 | yPercentage: 10,
72 | widthPercentage: 80,
73 | heightPercentage: 80,
74 | });
75 | onFace({
76 | xPercentage: 10,
77 | yPercentage: 10,
78 | widthPercentage: 80,
79 | heightPercentage: 80,
80 | });
81 | }
82 | }
83 | };
84 |
85 | if (!noDetection) {
86 | detectFace();
87 | }
88 | }, [modelsLoaded, imageLoaded, noDetection]);
89 |
90 | return (
91 |
100 | setImageLoaded(true)}
108 | />
109 | {box && (
110 |
122 | )}
123 |
124 | );
125 | }
126 |
--------------------------------------------------------------------------------
/client/components/FileUpload.tsx:
--------------------------------------------------------------------------------
1 | import PermMediaIcon from "@mui/icons-material/PermMedia";
2 | import { Typography, Sheet, Box } from "@mui/joy";
3 | import { nanoid } from "nanoid";
4 | import type { DragEvent } from "react";
5 |
6 | import type { ImageData } from "@/types";
7 | import { resizeImage } from "@/utils/traverseFileTree";
8 | interface FileUploadProps {
9 | min: number;
10 | ok: number;
11 | recommended: number;
12 | onDrop(event: DragEvent): void;
13 | onLoad(imageData: ImageData): void;
14 | }
15 | export default function FileUpload({ onDrop, onLoad, min, ok, recommended }: FileUploadProps) {
16 | return (
17 | e.preventDefault()}
34 | >
35 |
46 |
47 |
48 |
49 |
50 | Drop files or folders here or click to select files from disk.
51 |
52 |
53 | Minimum: {min} image{min === 1 ? "" : "s"}
54 |
55 |
56 | Better: {ok} images or more
57 |
58 |
59 | Recommended: {recommended} images or more
60 |
61 |
62 |
63 | {
74 | if (event.target.files) {
75 | Array.from(event.target.files).forEach(file => {
76 | if (file.type.startsWith("image/")) {
77 | const reader = new FileReader();
78 | reader.onload = event => {
79 | const image = new Image();
80 | image.src = event.target!.result as string;
81 | image.onload = async () => {
82 | const maxWidth = 300;
83 | const resizedDataUrl = await resizeImage(
84 | image,
85 | maxWidth,
86 | maxWidth * (1535 / 640) // SDXL max
87 | );
88 | onLoad({
89 | id: nanoid(),
90 | data: image.src,
91 | src: resizedDataUrl,
92 | name: file.name,
93 | width: image.width,
94 | height: image.height,
95 | caption: "",
96 | });
97 | };
98 | };
99 |
100 | reader.readAsDataURL(file);
101 | }
102 | });
103 | }
104 | }}
105 | />
106 |
107 | );
108 | }
109 |
--------------------------------------------------------------------------------
/client/components/Header.tsx:
--------------------------------------------------------------------------------
1 | import { Option, Select, Sheet } from "@mui/joy";
2 | import { useRouter } from "next/router";
3 |
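/**
 * Tiny pluralization helper: prefixes the count and pluralizes the word unless
 * the count is exactly 1, e.g. plural("project", 2) returns "2 projects".
 * Pass `singular`/`plural` endings for irregular words, e.g.
 * plural("entry", 3, "y", "ies") returns "3 entries".
 */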
4 | export function plural(word: string, count: number, singular = "", plural = "s") {
5 | if (count === 1) {
6 | return "1 " + word;
7 | }
8 |
9 | if (singular === "") {
10 | return `${count} ${word + plural}`;
11 | }
12 |
13 | return `${count} ${word.replace(new RegExp(`${singular}$`), plural)}`;
14 | }
15 |
16 | export function Header({ directories }: { directories: { fullPath: string; id: string }[] }) {
17 | const {
18 | push,
19 | query: { id = "new" },
20 | } = useRouter();
21 | return (
22 |
34 | {plural("project", directories.length)}
35 |
50 |
51 | );
52 | }
53 |
--------------------------------------------------------------------------------
/client/components/ImageItem.tsx:
--------------------------------------------------------------------------------
1 | import CheckIcon from "@mui/icons-material/CheckCircle";
2 | import DeleteIcon from "@mui/icons-material/Delete";
3 | import FileUploadIcon from "@mui/icons-material/FileUpload";
4 | import SaveIcon from "@mui/icons-material/Save";
5 | import {
6 | IconButton,
7 | Typography,
8 | Card,
9 | CardContent,
10 | Box,
11 | Textarea,
12 | FormControl,
13 | FormLabel,
14 | FormHelperText,
15 | CircularProgress,
16 | useTheme,
17 | } from "@mui/joy";
18 | import dynamic from "next/dynamic";
19 | import type { ChangeEventHandler } from "react";
20 | import { useState } from "react";
21 |
22 | import type { FaceBox, ImageData } from "@/types";
23 |
24 | const FaceDetectionImage = dynamic(() => import("@/components/FaceDetectionImage"), {
25 | ssr: false,
26 | });
27 | export interface ImageItemProps {
28 | upload?: boolean;
29 | modified?: boolean;
30 | demo?: boolean;
31 | image: ImageData;
32 | onRemove?: () => void;
33 | onSave?: () => void;
34 | onOpen?: () => void;
35 | onFace?: (faceBox: FaceBox) => void;
36 | onCaptionChange?: ChangeEventHandler;
37 | }
38 |
39 | export function StateIcon({ loading, done }: { loading?: boolean; done?: boolean }) {
40 | const theme = useTheme();
41 | if (loading) {
42 | return ;
43 | }
44 |
45 | return done ? (
46 |
47 | ) : (
48 |
49 | );
50 | }
51 |
52 | export default function ImageItem({
53 | image,
54 | demo,
55 | modified,
56 | onRemove,
57 | onSave,
58 | onOpen,
59 | onFace,
60 | onCaptionChange,
61 | }: ImageItemProps) {
62 | const [faceDetection, setFaceDetection] = useState(Boolean(image.faceBox));
63 |
64 | const hasGoodSize = Math.min(image.width, image.height) >= 1536;
65 |
66 | return (
67 |
77 |
78 | {
79 |
83 | }
84 | sx={{ mr: 6 }}
85 | >
86 | {image.name}
87 |
88 | }
89 |
90 | {onRemove && !modified && (
91 |
99 |
100 |
101 | )}
102 | {onSave && modified && (
103 |
111 |
112 |
113 | )}
114 |
115 |
116 |
117 |
118 | Dimensions: {image.width}x{image.height}
119 |
120 |
121 |
122 |
135 | {
147 | setFaceDetection(true);
148 | if (onFace) {
149 | onFace(faceBox);
150 | }
151 | }}
152 | />
153 |
154 |
155 | Caption
156 |
157 | Describe the subject in the image
158 |
159 |
160 | );
161 | }
162 |
--------------------------------------------------------------------------------
/client/components/ImageWithErrorHandling.tsx:
--------------------------------------------------------------------------------
1 | import type { ImageProps } from "next/image";
2 | import Image from "next/image";
3 | import { useState } from "react";
4 |
5 | interface EnhancedImageProps extends ImageProps {
6 | maxTries?: number;
7 | fallbackSrc?: string;
8 | }
9 |
10 | export function ImageWithErrorHandling({
11 | maxTries = 3,
12 | fallbackSrc,
13 | ...props
14 | }: EnhancedImageProps) {
15 | const [loadTries, setLoadTries] = useState(0);
16 | const [showFallback, setShowFallback] = useState(false);
17 |
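	// Count failed load attempts; once the budget is used up, swap in
	// fallbackSrc (when provided) instead of retrying indefinitely.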
18 | const handleImageError = () => {
19 | if (loadTries >= maxTries - 1 && fallbackSrc) {
20 | setShowFallback(true);
21 | } else if (loadTries < maxTries) {
22 | setLoadTries(loadTries + 1);
23 | }
24 | };
25 |
26 | const imageProps = showFallback
27 | ? { ...props, src: fallbackSrc }
28 | : { ...props, onError: handleImageError };
29 |
30 | return ;
31 | }
32 |
--------------------------------------------------------------------------------
/client/components/Layout.tsx:
--------------------------------------------------------------------------------
1 | import { Container } from "@mui/joy";
2 | import type { ReactNode } from "react";
3 |
4 | import { Header } from "@/components/Header";
5 |
6 | interface LayoutProps {
7 | children?: ReactNode;
8 | directories: { fullPath: string; id: string }[];
9 | }
10 |
11 | export default function Layout({ children, directories }: LayoutProps) {
12 | return (
13 | <>
14 |
15 |
16 | {children}
17 |
18 | >
19 | );
20 | }
21 |
--------------------------------------------------------------------------------
/client/components/Masonry.tsx:
--------------------------------------------------------------------------------
1 | import MuiMasonry from "@mui/lab/Masonry";
2 | import type { ReactNode } from "react";
3 |
4 | interface MasonryProps {
5 | children: NonNullable;
6 | }
7 |
8 | export default function Masonry({ children }: MasonryProps) {
9 | return (
10 |
11 | {children}
12 |
13 | );
14 | }
15 |
--------------------------------------------------------------------------------
/client/components/ProjectsTable.tsx:
--------------------------------------------------------------------------------
1 | import DeleteIcon from "@mui/icons-material/Delete";
2 | import SaveIcon from "@mui/icons-material/Save";
3 | import { Box, IconButton, Table, Textarea } from "@mui/joy";
4 | import type { ChangeEvent } from "react";
5 |
6 | import { ImageWithErrorHandling } from "@/components/ImageWithErrorHandling";
7 | import type { ImageUpload } from "@/types";
8 |
9 | export interface ProjectsTableProps {
10 | rows: ImageUpload[];
11 | onCaptionChange(id: string, value: string): void;
12 | onSave(image: ImageUpload): void;
13 | onRemove(image: ImageUpload): void;
14 | onClick(index: number): void;
15 | }
16 |
17 | interface TableRowProps {
18 | image: ImageUpload;
19 | onCaptionChange(id: string, value: string): void;
20 | onSave(): void;
21 | onRemove(): void;
22 | onClick(): void;
23 | }
24 |
25 | export function TableRow({ image, onCaptionChange, onSave, onRemove, onClick }: TableRowProps) {
26 | const { src, alt, height, width, modified } = image;
27 | const name = src.split("/").pop();
28 |
29 | const handleCaptionChange = (event: ChangeEvent) => {
30 | onCaptionChange(src, event.target.value);
31 | };
32 |
33 | return (
34 |
35 |
36 |
41 |
54 |
55 |
56 |
57 | {name}
58 |
59 |
60 |
61 | |
62 |
63 | {modified ? (
64 | {
68 | onSave();
69 | }}
70 | >
71 |
72 |
73 | ) : (
74 | {
78 | onRemove();
79 | }}
80 | >
81 |
82 |
83 | )}
84 |
85 |
86 | );
87 | }
88 |
89 | export function ProjectsTable(props: ProjectsTableProps) {
90 | const { rows, onCaptionChange, onRemove, onSave, onClick } = props;
91 |
92 | return (
93 |
94 |
95 | {rows.map((image, index) => (
96 | onSave(image)}
101 | onRemove={() => onRemove(image)}
102 | onClick={() => onClick(index)}
103 | />
104 | ))}
105 |
106 |
107 | );
108 | }
109 |
--------------------------------------------------------------------------------
/client/components/SlideshowModal.tsx:
--------------------------------------------------------------------------------
1 | import ArrowBackIcon from "@mui/icons-material/ArrowBack";
2 | import ArrowForwardIcon from "@mui/icons-material/ArrowForward";
3 | import CancelIcon from "@mui/icons-material/Cancel";
4 | import DeleteIcon from "@mui/icons-material/Delete";
5 | import DeleteForeverIcon from "@mui/icons-material/DeleteForever";
6 | import { Box, Modal, ModalClose, Sheet, IconButton, Typography, Button, Textarea } from "@mui/joy";
7 | import Image from "next/image";
8 | import { useState } from "react";
9 |
10 | import type { ImageData } from "@/types";
11 | interface SlideshowModalProps {
12 | images: Array;
13 | currentIndex: number | null;
14 | isOpen: boolean;
15 | onClose: () => void;
16 | onNext: () => void;
17 | onPrev: () => void;
18 | onDelete: (index: number) => void;
19 | onCaptionChange: (index: number, value: string) => void;
20 | }
21 |
22 | export default function SlideshowModal({
23 | images,
24 | currentIndex,
25 | isOpen,
26 | onClose,
27 | onNext,
28 | onPrev,
29 | onCaptionChange,
30 | onDelete,
31 | }: SlideshowModalProps) {
32 | const currentImage = images[currentIndex ?? 0];
33 | const [confirm, setConfirm] = useState(false);
34 |
35 | return (
36 |
43 |
56 |
57 |
58 |
66 | {`Image ${currentIndex === null ? 0 : currentIndex + 1} of ${images.length}`}
67 |
68 |
77 | {
79 | onPrev();
80 | setConfirm(false);
81 | }}
82 | >
83 |
84 |
85 | {currentImage && (
86 |
98 |
107 |
119 |
120 |
121 | {currentImage.width}x{currentImage.height}
122 |
123 |
124 | {confirm ? (
125 | <>
126 | }
128 | onClick={() => {
129 | setConfirm(false);
130 | }}
131 | >
132 | Cancel
133 |
134 | }
137 | onClick={() => {
138 | setConfirm(false);
139 | if (currentIndex !== null) {
140 | onDelete(currentIndex);
141 | }
142 | }}
143 | >
144 | Confirm
145 |
146 | >
147 | ) : (
148 | }
150 | onClick={() => {
151 | setConfirm(true);
152 | }}
153 | >
154 | Remove
155 |
156 | )}
157 |
158 |
168 |
169 |
170 | )}
171 | {
173 | onNext();
174 | setConfirm(false);
175 | }}
176 | >
177 |
178 |
179 |
180 |
181 |
182 | );
183 | }
184 |
--------------------------------------------------------------------------------
/client/components/SynchedSliderInput.tsx:
--------------------------------------------------------------------------------
1 | import { Box, FormControl, FormLabel, Slider, Input, FormHelperText } from "@mui/joy";
2 | import type { ChangeEvent, ForwardRefRenderFunction } from "react";
3 | import { forwardRef } from "react";
4 |
5 | interface SyncedSliderInputProps {
6 | value: number;
7 | onChange: (value: number) => void;
8 | name?: string;
9 | min?: number;
10 | max?: number;
11 | label: string;
12 | helperText?: string;
13 | }
14 |
15 | const SyncedSliderInput: ForwardRefRenderFunction = (
16 | { value, name, onChange, min = 1, max = 30, label, helperText, ...props },
17 | ref
18 | ) => {
19 | const handleSliderChange = (event: Event, newValue: number | number[]) => {
20 | onChange(newValue as number);
21 | };
22 |
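	// Clamp typed values into [min, max] so the text input can never push the
	// slider out of range.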
23 | const handleInputChange = (event: ChangeEvent) => {
24 | const inputValue = Math.min(max, Math.max(min, Number(event.target.value)));
25 | onChange(inputValue);
26 | };
27 |
28 | return (
29 |
30 |
31 | {label}
32 |
33 |
42 |
51 |
52 | {helperText && {helperText}}
53 |
54 |
55 | );
56 | };
57 |
58 | export default forwardRef(SyncedSliderInput);
59 |
--------------------------------------------------------------------------------
/client/data/exampleImages.ts:
--------------------------------------------------------------------------------
1 | import type { ImageData } from "@/types";
2 |
3 | export const exampleImages: ImageData[] = [
4 | {
5 | id: "1",
6 | faceBox: {
7 | yPercentage: 12.0428,
8 | xPercentage: 28.6085,
9 | widthPercentage: 23.7071,
10 | heightPercentage: 15.2706,
11 | },
12 | caption:
13 | "photo of a woman with medium long auburn hair, wearing a white dress with burgundy floral pattern, from side, looking at the viewer, leaning against a white wall",
14 | name: "Example woman 1",
15 | width: 3_335,
16 | height: 4_668,
17 | src: "/images/anamnesis33/example (1).jpg",
18 | },
19 | {
20 | id: "2",
21 | faceBox: {
22 | yPercentage: 15.8052,
23 | xPercentage: 22.6733,
24 | widthPercentage: 23.276,
25 | heightPercentage: 33.6678,
26 | },
27 | caption:
28 | "photo of a woman with auburn hair tied up, wearing a dark blue dress with floral and butterfly pattern, looking to the side, leaning against a boulder",
29 | name: "Example woman 2",
30 | width: 6_000,
31 | height: 4_000,
32 | src: "/images/anamnesis33/example (2).jpg",
33 | },
34 | {
35 | id: "3",
36 | faceBox: {
37 | yPercentage: 12.3337,
38 | xPercentage: 52.7403,
39 | widthPercentage: 25.2,
40 | heightPercentage: 14.1753,
41 | },
42 | caption:
43 | "photo of a woman with shoulder long auburn hair, wearing a turquoise turtleneck sweater, blue jeans and white sneakers, looking at the viewer, sitting on a stairway",
44 | name: "Example woman 3",
45 | width: 3_727,
46 | height: 5_591,
47 | src: "/images/anamnesis33/example (3).jpg",
48 | },
49 | {
50 | id: "4",
51 | faceBox: {
52 | yPercentage: 37.9282,
53 | xPercentage: 14.8775,
54 | widthPercentage: 48.4269,
55 | heightPercentage: 30.7289,
56 | },
57 | caption:
58 | "photo of a woman with auburn tied up hair, wearing a red dress with white blossom pattern, hand against neck, standing near a road with cars, looking at the viewer, depth of field",
59 | name: "Example woman 4",
60 | width: 4_000,
61 | height: 6_000,
62 | src: "/images/anamnesis33/example (4).jpg",
63 | },
64 | ];
65 |
--------------------------------------------------------------------------------
/client/data/kohyaConfig.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable camelcase */
2 | export const kohyaConfig = {
3 | LoRA_type: "Standard",
4 | adaptive_noise_scale: 0,
5 | additional_parameters: "",
6 | block_alphas: "",
7 | block_dims: "",
8 | block_lr_zero_threshold: "",
9 | bucket_no_upscale: true,
10 | bucket_reso_steps: 64,
11 | cache_latents: true,
12 | cache_latents_to_disk: true,
13 | caption_dropout_every_n_epochs: 0.0,
14 | caption_dropout_rate: 0,
15 | caption_extension: ".txt",
16 | clip_skip: "1",
17 | color_aug: false,
18 | conv_alpha: 1,
19 | conv_alphas: "",
20 | conv_dim: 1,
21 | conv_dims: "",
22 | decompose_both: false,
23 | dim_from_weights: false,
24 | down_lr_weight: "",
25 | enable_bucket: true,
26 | epoch: 1,
27 | factor: -1,
28 | flip_aug: false,
29 | full_bf16: false,
30 | full_fp16: false,
31 | gradient_accumulation_steps: 1.0,
32 | gradient_checkpointing: true,
33 | keep_tokens: "0",
34 | learning_rate: 0.0004,
35 | logging_dir: "",
36 | lora_network_weights: "",
37 | lr_scheduler: "constant",
38 | lr_scheduler_num_cycles: "",
39 | lr_scheduler_power: "",
40 | lr_warmup: 0,
41 | max_bucket_reso: 2048,
42 | max_data_loader_n_workers: "0",
43 | max_resolution: "1536,1536",
44 | max_timestep: 1000,
45 | max_token_length: "75",
46 | max_train_epochs: "",
47 | mem_eff_attn: false,
48 | mid_lr_weight: "",
49 | min_bucket_reso: 256,
50 | min_snr_gamma: 0,
51 | min_timestep: 0,
52 | mixed_precision: "bf16",
53 | model_list: "custom",
54 | module_dropout: 0,
55 | multires_noise_discount: 0,
56 | multires_noise_iterations: 0,
57 | network_alpha: 1,
58 | network_dim: 256,
59 | network_dropout: 0,
60 | no_token_padding: false,
61 | noise_offset: 0,
62 | noise_offset_type: "Original",
63 | num_cpu_threads_per_process: 2,
64 | optimizer: "Adafactor",
65 | optimizer_args: "scale_parameter=False relative_step=False warmup_init=False",
66 | output_dir: "",
67 | output_name: "",
68 | persistent_data_loader_workers: false,
69 | pretrained_model_name_or_path: "",
70 | prior_loss_weight: 1.0,
71 | random_crop: false,
72 | rank_dropout: 0,
73 | reg_data_dir: "",
74 | resume: "",
75 | sample_every_n_epochs: 0,
76 | sample_every_n_steps: 0,
77 | sample_prompts: "",
78 | sample_sampler: "euler_a",
79 | save_every_n_epochs: 1,
80 | save_every_n_steps: 0,
81 | save_last_n_steps: 0,
82 | save_last_n_steps_state: 0,
83 | save_model_as: "safetensors",
84 | save_precision: "bf16",
85 | save_state: false,
86 | scale_v_pred_loss_like_noise_pred: false,
87 | scale_weight_norms: 0,
88 | sdxl: true,
89 | sdxl_cache_text_encoder_outputs: false,
90 | sdxl_no_half_vae: true,
91 | seed: "",
92 | shuffle_caption: false,
93 | stop_text_encoder_training: 0,
94 | text_encoder_lr: 0.0004,
95 | train_batch_size: 1,
96 | train_data_dir: "",
97 | train_on_input: false,
98 | training_comment: "",
99 | unet_lr: 0.0004,
100 | unit: 1,
101 | up_lr_weight: "",
102 | use_cp: false,
103 | use_wandb: false,
104 | v2: false,
105 | v_parameterization: false,
106 | vae_batch_size: 0,
107 | wandb_api_key: "",
108 | weighted_captions: false,
109 | xformers: true,
110 | };
111 | /* eslint-enable camelcase */
112 |
--------------------------------------------------------------------------------
/client/services/prepare/crop.ts:
--------------------------------------------------------------------------------
1 | // Configuration for the face-api model path
2 | import path from "node:path";
3 |
4 | import * as tf from "@tensorflow/tfjs-node";
5 | import type { TResolvedNetInput } from "@vladmandic/face-api";
6 | import * as faceApi from "@vladmandic/face-api";
7 | import sharp from "sharp";
8 |
9 | export const MODEL_URL = path.join(process.cwd(), "public/face-api/models");
10 |
11 | /**
12 | * Load models required by face-api.
13 |  * It sets up the TensorFlow backend and loads the SSD MobileNet V1 model.
14 | */
15 | export async function loadModels() {
16 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment
17 | // @ts-ignore
18 | await faceApi.tf.setBackend("tensorflow");
19 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment
20 | // @ts-ignore
21 | await faceApi.tf.enableProdMode();
22 | await faceApi.tf.ENV.set("DEBUG", false);
23 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment
24 | // @ts-ignore
25 | await faceApi.tf.ready();
26 |
27 | await faceApi.nets.ssdMobilenetv1.loadFromDisk(MODEL_URL);
28 | }
29 |
30 | /**
31 | * Crops the image around the detected face based on a given zoom level.
32 |  * @param {string | Buffer} inputImagePath - Path to the input image, or the image data as a buffer.
33 | * @param {Object} outputDimensions - Desired output dimensions.
34 | * @param {number} zoomLevel - Zoom level, between 0 (minimal zoom) and 1 (maximum zoom).
35 |  * @returns {Promise<Buffer>} A buffer containing the cropped image.
36 | */
37 | export async function cropImageToFace(
38 | inputImagePath: string | Buffer,
39 | outputDimensions: { height: number; width: number },
40 | zoomLevel = 0
41 | ) {
42 | const { data, info } = await sharp(inputImagePath).raw().toBuffer({ resolveWithObject: true });
43 |
44 | const imageTensor = tf.tensor3d(data, [
45 | info.height,
46 | info.width,
47 | info.channels,
48 | ]) as unknown as TResolvedNetInput;
49 | let width;
50 | let height;
51 | let left;
52 | let top;
53 |
54 | let minimalWidth;
55 | let minimalHeight; // For zoomLevel: 0
56 | let maximalWidth;
57 | let maximalHeight; // For zoomLevel: 1
58 |
59 | const detections = await faceApi.detectAllFaces(imageTensor);
60 | // Calculations for zoomLevel: 0
61 | const aspectRatioOriginal = info.width / info.height;
62 | const aspectRatioDesired = outputDimensions.width / outputDimensions.height;
63 | if (detections.length === 0) {
64 | // Set the center to the center of the image
65 | const centerX = info.width / 2;
66 | const centerY = info.height / 2;
67 |
68 | if (aspectRatioOriginal > aspectRatioDesired) {
69 | // Original image is wider, crop sides
70 | minimalHeight = info.height;
71 | minimalWidth = minimalHeight * aspectRatioDesired;
72 | } else {
73 | // Original image is taller, crop top and bottom
74 | minimalWidth = info.width;
75 | minimalHeight = minimalWidth / aspectRatioDesired;
76 | }
77 |
78 | width = minimalWidth;
79 | height = minimalHeight;
80 |
81 | // Center the cropping box around the center of the image
82 | left = centerX - width / 2;
83 | top = centerY - height / 2;
84 | } else {
85 | const faceBox = detections[0].box;
86 |
87 | const faceCenterX = faceBox.x + faceBox.width / 2;
88 | const faceCenterY = faceBox.y + faceBox.height / 2;
89 |
90 | if (aspectRatioOriginal > aspectRatioDesired) {
91 | // Original image is wider, crop sides
92 | minimalHeight = info.height;
93 | minimalWidth = minimalHeight * aspectRatioDesired;
94 | } else {
95 | // Original image is taller, crop top and bottom
96 | minimalWidth = info.width;
97 | minimalHeight = minimalWidth / aspectRatioDesired;
98 | }
99 |
100 | // Calculations for zoomLevel: 1
101 | if (aspectRatioOriginal > aspectRatioDesired) {
102 | // If the image is wider (landscape)
103 | maximalHeight = faceBox.height;
104 | maximalWidth = maximalHeight * aspectRatioDesired;
105 | } else {
106 | // If the image is taller (portrait)
107 | maximalWidth = faceBox.width;
108 | maximalHeight = maximalWidth / aspectRatioDesired;
109 | }
110 |
111 | // Linearly interpolate width and height based on zoomLevel
112 | width = minimalWidth + zoomLevel * (maximalWidth - minimalWidth);
113 | height = minimalHeight + zoomLevel * (maximalHeight - minimalHeight);
114 |
115 | // Center the cropping box around the face
116 | left = faceCenterX - width / 2;
117 | top = faceCenterY - height / 2;
118 | }
119 |
120 | // Ensure cropping coordinates are within the image bounds
121 | left = Math.max(0, Math.min(info.width - width, left));
122 | top = Math.max(0, Math.min(info.height - height, top));
123 |
124 | return sharp(inputImagePath)
125 | .extract({
126 | left: Math.round(left),
127 | top: Math.round(top),
128 | width: Math.round(width),
129 | height: Math.round(height),
130 | })
131 | .resize(outputDimensions.width, outputDimensions.height)
132 | .png()
133 | .toBuffer();
134 | }
135 |
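136 | /*
137 | Usage sketch (illustrative path): load the models once, then crop the same
138 | photo into a portrait bucket at minimal and maximal zoom.
139 |
140 | await loadModels();
141 | const wide = await cropImageToFace("./photo.jpg", { width: 832, height: 1216 }, 0);
142 | const tight = await cropImageToFace("./photo.jpg", { width: 832, height: 1216 }, 1);
143 | */
144 |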
--------------------------------------------------------------------------------
/client/services/prepare/prepare.ts:
--------------------------------------------------------------------------------
1 | import fs from "node:fs/promises";
2 | import path from "node:path";
3 |
4 | import { cropImageToFace, loadModels } from "./crop";
5 | import { ensureDirExists, getClosestSize, getImageDimensions } from "./utils";
6 |
7 | import { sizes15 } from "@/services/prepare/sizes";
8 | import type { ImageUpload } from "@/types";
9 |
10 | export async function prepareImage({
11 | crop = false,
12 | image,
13 | repeats = 1,
14 | zoomLevels = [0],
15 | className,
16 | subject,
17 | sdxl,
18 | outDir,
19 | counter,
20 | sizes,
21 | }: {
22 | crop?: boolean;
23 | counter: number;
24 | image: string;
25 | repeats: number;
26 | zoomLevels?: number[];
27 | className: string;
28 | subject: string;
29 | sdxl: boolean;
30 | outDir: string;
31 | sizes: [number, number][];
32 | }) {
33 | await loadModels();
34 | const urls: ImageUpload[] = [];
35 |
36 | const outFolderName = path.join(outDir, "img", `${repeats}_${subject} ${className}`);
37 |
38 | await ensureDirExists(outFolderName);
39 |
40 | const imageInfo = await getImageDimensions(image);
41 | let caption: string;
42 | try {
43 | caption = await fs.readFile(image.replace(/\.jpe?g$/, ".txt"), "utf-8");
44 | } catch {
45 | caption = `${subject} ${className}`;
46 | }
47 |
48 | const baseSizes = sdxl ? sizes : sizes15;
49 | const baseSize = sdxl ? [1024, 1024] : [512, 512];
50 | const requestedSizes = crop
51 | ? baseSizes
52 | : [
53 | getClosestSize({ height: imageInfo.height!, width: imageInfo.width! }, baseSizes) ??
54 | baseSize,
55 | ];
56 | const failed: string[] = [];
57 | let localCounter = 0;
58 | for (const [width, height] of requestedSizes) {
59 | for (const zoomLevel of zoomLevels) {
60 | if (!failed.includes(image)) {
61 | try {
62 | // eslint-disable-next-line no-await-in-loop
63 | const result = await cropImageToFace(image, { width, height }, zoomLevel);
64 | ++localCounter;
65 | const imageId = `${counter.toString().padStart(4, "0")}.${localCounter
66 | .toString()
67 | .padStart(2, "0")}`;
68 | const filename = `${subject}--${imageId}`;
69 | const outputPath = path.join(outFolderName, `${filename}.png`);
70 | const captionPath = path.join(outFolderName, `${filename}.txt`);
71 | urls.push({
72 | height,
73 | width,
74 | alt: caption,
75 | captionPath,
76 | outputPath,
77 | src: `/api/uploads/${outFolderName.split("training")[1]}/${filename}.png`
78 | .replaceAll("\\", "/")
79 | .replace(/\/+/g, "/"),
80 | });
81 |
82 | // eslint-disable-next-line no-await-in-loop
83 | await fs.writeFile(outputPath, result);
84 | // eslint-disable-next-line no-await-in-loop
85 | await fs.writeFile(captionPath, caption);
86 | } catch (error) {
87 | failed.push(image);
88 | console.error("Failed on image:", image, error);
89 | }
90 | }
91 | }
92 | }
93 |
94 | return urls;
95 | }
96 | /*
97 | let counter = 0;
98 | const images = Array.from({ length: 30 }, (_, index) =>
99 | `./images/anamnesis33 (${index + 1}).jpg`
100 | );
101 |
102 | const zoomLevels = [0];
103 | const repeats = Math.max(5, Math.ceil(150 / images.length));
104 | const className = "woman";
105 | const subject = "ohwx";
106 |
107 | await Promise.all(
108 | images.map((image) => {
109 | return prepareImage({
110 | image,
111 | counter: ++counter,
112 | sizes,
113 | zoomLevels,
114 | repeats,
115 | className,
116 | subject,
117 | sdxl: true,
118 | outDir: `./outImg/${Date.now()}`,
119 | });
120 | }),
121 | );
122 | */
123 |
--------------------------------------------------------------------------------
/client/services/prepare/reg.ts:
--------------------------------------------------------------------------------
1 | import fs from "node:fs/promises";
2 | import path from "node:path";
3 |
4 | import axios from "axios";
5 | import { createApi } from "unsplash-js";
6 | import type { ApiResponse } from "unsplash-js/dist/helpers/response";
7 | import type { Random } from "unsplash-js/dist/methods/photos/types";
8 |
9 | import { cropImageToFace, loadModels } from "./crop";
10 | import { sizes } from "./sizes";
11 | import { ensureDirExists, getClosestSize } from "./utils";
12 |
13 | interface User {
14 | name: string;
15 | username: string;
16 | profile_image: string;
17 | profile_url: string;
18 | }
19 |
20 | interface ImageData {
21 | imageName: string; // Adjust type as needed
22 | id: string | number; // Adjust type as needed
23 | description: string | null;
24 | alt_description: string | null;
25 | user: User;
26 | link: string;
27 | }
28 |
29 | if (!process.env.UNSPLASH_ACCESS_KEY) {
30 | throw new Error("Missing UNSPLASH_ACCESS_KEY");
31 | }
32 |
33 | const unsplash = createApi({
34 | accessKey: process.env.UNSPLASH_ACCESS_KEY,
35 | });
36 |
37 | export async function getRegularisationImages({
38 | className,
39 | totalImagesToFetch,
40 | crop = false,
41 | outDir: outDir_,
42 | }: {
43 | className: string;
44 | totalImagesToFetch: number;
45 | crop?: boolean;
46 | outDir: string;
47 | }) {
48 | const outDir = path.join(outDir_, `1_${className}`);
49 |
50 | await ensureDirExists(outDir);
51 | await loadModels();
52 |
53 | let totalImagesFetched = 0;
54 | let imageCounter = 0;
55 | const fetchedImageIds = new Set();
56 | const imageAttributions: ImageData[] = [];
57 |
58 | async function fetchCropAndSave(imageInfo: Random, zoomLevel = 0) {
59 | if (fetchedImageIds.has(imageInfo.id)) {
60 | console.warn(`Skipping duplicate image with ID ${imageInfo.id}`);
61 | return;
62 | }
63 |
64 | const imageResponse = await axios.get(imageInfo.urls.full, {
65 | responseType: "arraybuffer",
66 | });
67 | // We either crop to all sizes or just use the closest resolution
68 | const requestedSizes = crop
69 | ? sizes
70 | : [
71 | getClosestSize({ height: imageInfo.height, width: imageInfo.width }, sizes) ?? [
72 | 1024, 1024,
73 | ],
74 | ];
75 |
76 | for (const [width, height] of requestedSizes) {
77 | const imageName = `${className} (${imageCounter + 1})`;
78 | const imagePath = path.join(outDir, `${imageName}.jpg`);
79 | const captionPath = path.join(outDir, `${imageName}.txt`);
80 | // eslint-disable-next-line no-await-in-loop
81 | const croppedResult = await cropImageToFace(
82 | imageResponse.data,
83 | { width, height },
84 | zoomLevel
85 | );
86 | // Store the relevant attribution information
87 | imageAttributions.push({
88 | imageName,
89 | id: imageInfo.id,
90 | description: imageInfo.description,
91 | // eslint-disable-next-line camelcase
92 | alt_description: imageInfo.alt_description,
93 | user: {
94 | name: imageInfo.user.name,
95 | username: imageInfo.user.username,
96 | // eslint-disable-next-line camelcase
97 | profile_image: imageInfo.user.profile_image.small,
98 | // eslint-disable-next-line camelcase
99 | profile_url: `https://unsplash.com/@${imageInfo.user.username}`,
100 | },
101 | link: imageInfo.links.html,
102 | });
103 | // Write before each try
104 | // eslint-disable-next-line no-await-in-loop
105 | await fs.writeFile(
106 | path.join(outDir, "attributions.json"),
107 | JSON.stringify(imageAttributions, null, 2)
108 | );
109 | // eslint-disable-next-line no-await-in-loop
110 | await fs.writeFile(imagePath, croppedResult);
111 | // eslint-disable-next-line no-await-in-loop
112 | await fs.writeFile(captionPath, imageInfo.alt_description ?? "");
113 | console.log(`Saved image: ${imagePath} and caption: ${captionPath}`);
114 | imageCounter++;
115 | }
116 |
117 | totalImagesFetched++;
118 |
119 | // Add the imageId to the set after successfully processing
120 | fetchedImageIds.add(imageInfo.id);
121 | }
122 |
123 | while (totalImagesFetched < totalImagesToFetch) {
124 | const remainingImages = totalImagesToFetch - totalImagesFetched;
125 | const count = Math.min(10, remainingImages);
126 |
127 | // eslint-disable-next-line no-await-in-loop
128 | const result = (await unsplash.photos.getRandom({
129 | query: className,
130 | count,
131 | })) as ApiResponse<Random[]>;
132 |
133 | if (result?.response?.length) {
134 | for (const imageInfo of result.response) {
135 | try {
136 | if (totalImagesFetched < totalImagesToFetch) {
137 | // eslint-disable-next-line no-await-in-loop
138 | await fetchCropAndSave(imageInfo, 0);
139 | } else {
140 | break;
141 | }
142 | } catch (error) {
143 | console.error(
144 | `Error processing image from ${imageInfo.urls.full}:`,
145 | (error as Error).message
146 | );
147 | }
148 | }
149 | } else {
150 | console.log("No photos found or an error occurred.");
151 | }
152 | }
153 |
154 | if (totalImagesFetched < totalImagesToFetch) {
155 | console.warn(
156 | `Only fetched ${totalImagesFetched} images out of the desired ${totalImagesToFetch}.`
157 | );
158 | } else {
159 | console.log(`Total images fetched: ${totalImagesFetched}`);
160 | }
161 | }
162 |
163 | /*
164 | await getRegularisationImages({
165 | outDir: `./out/${Date.now()}`,
166 | className: "woman",
167 | totalImagesToFetch: 10,
168 | crop: true,
169 | });
170 | */
171 |
--------------------------------------------------------------------------------
/client/services/prepare/sizes.ts:
--------------------------------------------------------------------------------
1 | export type Dimension = [number, number];
2 | export const sizes: Dimension[] = [
3 | [640, 1536],
4 | [768, 1344],
5 | [832, 1216],
6 | [896, 1152],
7 | [1024, 1024],
8 | [1152, 896],
9 | [1216, 832],
10 | [1344, 768],
11 | [1536, 640],
12 | ];
13 |
14 | export const sizes15: Dimension[] = [
15 | [512, 768],
16 | [512, 512],
17 | [768, 512],
18 | ];
19 |
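20 | /*
21 | All SDXL sizes are multiples of 64 (matching bucket_reso_steps: 64 in the
22 | kohya config) and cover roughly one megapixel. Quick sanity check:
23 |
24 | sizes.every(([width, height]) => width % 64 === 0 && height % 64 === 0); // true
25 | */
26 |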
--------------------------------------------------------------------------------
/client/services/prepare/utils.ts:
--------------------------------------------------------------------------------
1 | import { readFile } from "node:fs/promises";
2 | import fs from "node:fs/promises";
3 |
4 | import sizeOf from "image-size";
5 |
6 | /**
7 | * Ensures a directory exists; if not, creates it.
8 | * @param {string} dirPath - The path to the directory.
9 | */
10 | export async function ensureDirExists(dirPath: string) {
11 | try {
12 | await fs.access(dirPath);
13 | } catch (err) {
14 | await fs.mkdir(dirPath, { recursive: true });
15 | }
16 | }
17 |
18 | /**
19 |  * Finds the size whose aspect ratio is closest to the given dimensions.
20 |  * @param {number} height - Height of the source image.
21 |  * @param {number} width - Width of the source image.
22 |  * @param {[number, number][]} sizes - Candidate [width, height] pairs.
23 |  * @returns {[number, number] | null} The closest size, or null if sizes is empty.
24 |  */
25 | export function getClosestSize(
26 | { height, width }: { height: number; width: number },
27 | sizes: [number, number][]
28 | ) {
29 | let closestSize = null;
30 | let smallestDifference = Infinity;
31 |
32 | // Calculate the aspect ratio of the provided dimensions
33 | const aspectRatio = width / height;
34 |
35 | for (const [candidateWidth, candidateHeight] of sizes) {
36 | const currentRatio = candidateWidth / candidateHeight;
37 | const difference = Math.abs(currentRatio - aspectRatio);
38 |
39 | if (difference < smallestDifference) {
40 | smallestDifference = difference;
41 | closestSize = [candidateWidth, candidateHeight];
42 | }
43 | }
44 |
45 | return closestSize;
46 | }
47 |
48 | /**
49 |  * Reads an image file from disk and returns its pixel dimensions.
50 |  * @param {string} filePath - Path to the image file.
51 |  * @returns {Promise<{width: number, height: number}>}
52 | */
53 | export async function getImageDimensions(filePath: string) {
54 | try {
55 | const imageBuffer = await readFile(filePath);
56 | const dimensions = sizeOf(imageBuffer);
57 | return {
58 | width: dimensions.width,
59 | height: dimensions.height,
60 | };
61 | } catch (error) {
62 | console.error("Error reading the image:", error);
63 | throw error;
64 | }
65 | }
66 |
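67 | /*
68 | Usage sketch (illustrative values): pick the bucket whose aspect ratio best
69 | matches a 1200x1800 portrait photo.
70 |
71 | import { sizes } from "./sizes";
72 | const size = getClosestSize({ width: 1200, height: 1800 }, sizes);
73 | // => [832, 1216] (ratio ~0.684 vs. photo ratio ~0.667)
74 | */
75 |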
--------------------------------------------------------------------------------
/client/types.ts:
--------------------------------------------------------------------------------
1 | export interface ImageData {
2 | id: string;
3 | caption: string;
4 | data?: string;
5 | src: string;
6 | name: string;
7 | width: number;
8 | height: number;
9 | uploaded?: boolean;
10 | faceBox?: FaceBox;
11 | }
12 |
13 | export interface FormDataModel {
14 | projectName: string;
15 | sdxl: boolean;
16 | checkpoint: string;
17 | subject: string;
18 | className: string;
19 | epochs: number;
20 | crop: boolean;
21 | sample: boolean;
22 | lowVRAM: boolean;
23 | regularisation: boolean;
24 | files: File[];
25 | filename: string;
26 | }
27 |
28 | export interface ImageUpload {
29 | modified?: boolean;
30 | height: number;
31 | width: number;
32 | alt: string;
33 | src: string;
34 | captionPath: string;
35 | outputPath: string;
36 | }
37 |
38 | export interface FaceBox {
39 | xPercentage: number;
40 | yPercentage: number;
41 | widthPercentage: number;
42 | heightPercentage: number;
43 | }
44 |
--------------------------------------------------------------------------------
/client/utils/samples.ts:
--------------------------------------------------------------------------------
1 | export interface PromptConfig {
2 | header: string;
3 | content: string[];
4 | flags: string;
5 | exclude: string[];
6 | }
7 |
8 | export function generatePrompt({ header, content, flags, exclude }: PromptConfig): string {
9 | return `# ${header}
10 | ${content.join(" ")} ${flags} --n ${exclude.join(", ")}
11 | `;
12 | }
13 |
14 | export function createSamplePrompt(
15 | subject: string,
16 | className: string,
17 | {
18 | real,
19 | pixar,
20 | watercolor,
21 | sdxl,
22 | }: { real?: boolean; pixar?: boolean; watercolor?: boolean; sdxl?: boolean } = {
23 | real: true,
24 | }
25 | ): string {
26 | const commonContent = [subject, className, "solo, best quality, highres, 4k"];
27 |
28 | const prompts: { [key: string]: PromptConfig } = {
29 | real: {
30 | header: "Photorealistic",
31 | content: ["portrait photo of", ...commonContent, "hasselblad, fujifilm"],
32 | flags: `--h ${sdxl ? 1216 : 768} --w ${sdxl ? 832 : 512} --l 8 --s 35`,
33 | exclude: ["worst quality", "3d", "blurry"],
34 | },
35 | pixar: {
36 | header: "Pixar style",
37 | content: [
38 | "pixar style 3d render of",
39 | ...commonContent,
40 | "cg, octane render, unreal engine",
41 | ],
42 | flags: `--h ${sdxl ? 1024 : 512} --w ${sdxl ? 1024 : 512} --l 8 --s 35`,
43 | exclude: ["worst quality", "photo", "photorealistic"],
44 | },
45 | watercolor: {
46 | header: "Watercolor",
47 | content: [
48 | "watercolor painting of",
49 | ...commonContent,
50 | "sketch, illustration, ink outlines, unfinished background",
51 | ],
52 | flags: `--h ${sdxl ? 1024 : 512} --w ${sdxl ? 1024 : 512} --l 8 --s 35`,
53 | exclude: ["worst quality", "photo", "photorealistic"],
54 | },
55 | };
56 |
57 | let result = "";
58 | if (real) {
59 | result += generatePrompt(prompts.real);
60 | }
61 |
62 | if (pixar) {
63 | result += generatePrompt(prompts.pixar);
64 | }
65 |
66 | if (watercolor) {
67 | result += generatePrompt(prompts.watercolor);
68 | }
69 |
70 | return result.trim() + "\n";
71 | }
72 |
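73 | /*
74 | Usage sketch: a photorealistic SDXL sample prompt for "ohwx woman".
75 | Output shape (abridged):
76 |
77 | createSamplePrompt("ohwx", "woman", { real: true, sdxl: true });
78 | // "# Photorealistic\nportrait photo of ohwx woman ... --h 1216 --w 832 --l 8 --s 35 --n worst quality, 3d, blurry\n"
79 | */
80 |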
--------------------------------------------------------------------------------
/client/utils/traverseFileTree.ts:
--------------------------------------------------------------------------------
1 | import { nanoid } from "nanoid";
2 |
3 | import type { ImageData } from "@/types";
4 |
5 | /**
6 | * Resize an image to fit within a specified width and height.
7 | * @param img - the source image
8 | * @param maxWidth - the maximum width for the output image
9 | * @param maxHeight - the maximum height for the output image
10 | * @return a promise that resolves with the data URL for the resized image
11 | */
12 | export function resizeImage(
13 | img: HTMLImageElement,
14 | maxWidth: number,
15 | maxHeight: number
16 | ): Promise<string> {
17 | return new Promise(resolve => {
18 | const canvas = document.createElement("canvas");
19 | let { width, height } = img;
20 |
21 | // Calculate new dimensions, maintaining aspect ratio
22 | if (width > height) {
23 | if (width > maxWidth) {
24 | height *= maxWidth / width;
25 | width = maxWidth;
26 | }
27 | } else if (height > maxHeight) {
28 | width *= maxHeight / height;
29 | height = maxHeight;
30 | }
31 |
32 | canvas.width = width;
33 | canvas.height = height;
34 |
35 | const ctx = canvas.getContext("2d");
36 | ctx!.drawImage(img, 0, 0, width, height);
37 | const dataUrl = canvas.toDataURL("image/jpeg", 0.7);
38 | canvas.remove();
39 | resolve(dataUrl); // Output as jpeg with 70% quality
40 | });
41 | }
42 |
43 | export async function traverseFileTree(
44 | item: any,
45 | onLoad: (imageData: ImageData) => void,
46 | path = ""
47 | ) {
48 | if (item.isFile) {
49 | item.file((file: File) => {
50 | const reader = new FileReader();
51 | reader.onload = event => {
52 | const image = new Image();
53 | image.src = event.target!.result as string;
54 | image.onload = async () => {
55 | const maxWidth = 300;
56 | const resizedDataUrl = await resizeImage(
57 | image,
58 | maxWidth,
59 | maxWidth * (1536 / 640) // SDXL max aspect ratio (640x1536 bucket)
60 | );
61 | onLoad({
62 | id: nanoid(),
63 | data: image.src,
64 | src: resizedDataUrl,
65 | name: file.name,
66 | width: image.width,
67 | height: image.height,
68 | caption: "",
69 | });
70 | };
71 | };
72 |
73 | reader.readAsDataURL(file);
74 | });
75 | } else if (item.isDirectory) {
76 | const dirReader = item.createReader();
77 | dirReader.readEntries(async (entries: any) => {
78 | for (let i = 0; i < entries.length; i++) {
79 | // eslint-disable-next-line no-await-in-loop
80 | await traverseFileTree(entries[i], onLoad, path + item.name + "/");
81 | }
82 | });
83 | }
84 | }
85 |
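86 | /*
87 | Usage sketch (browser only; `dropZone` and `handleImage` are placeholders):
88 |
89 | dropZone.addEventListener("drop", async event => {
90 | event.preventDefault();
91 | for (const item of Array.from(event.dataTransfer!.items)) {
92 | const entry = item.webkitGetAsEntry();
93 | if (entry) {
94 | await traverseFileTree(entry, handleImage);
95 | }
96 | }
97 | });
98 | */
99 |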
--------------------------------------------------------------------------------
/next.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = {
3 | reactStrictMode: true,
4 | images: {
5 | domains: ["localhost"], // Or your local domain
6 | },
7 | };
8 |
9 | module.exports = nextConfig;
10 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "lorado",
3 | "version": "0.1.0-alpha.8",
4 | "private": true,
5 | "homepage": "https://github.com/failfa-st/LoRAdo",
6 | "bugs": {
7 | "url": "https://github.com/failfa-st/LoRAdo/issues"
8 | },
9 | "repository": {
10 | "type": "git",
11 | "url": "https://github.com/failfa-st/LoRAdo.git"
12 | },
13 | "license": "AGPL 3.0",
14 | "author": "pixelass (https://github.com/pixelass)",
15 | "scripts": {
16 | "build": "next build",
17 | "changelog": "npx conventional-changelog-cli -p angular -i CHANGELOG.md -s",
18 | "dev": "next dev",
19 | "lint": "next lint",
20 | "ncu": "npx npm-check-updates -u",
21 | "spj": "npx sort-package-json",
22 | "start": "next start",
23 | "toc": "npx markdown-toc README.md -i"
24 | },
25 | "dependencies": {
26 | "@emotion/react": "^11.11.1",
27 | "@emotion/styled": "^11.11.0",
28 | "@mui/icons-material": "^5.14.3",
29 | "@mui/joy": "^5.0.0-beta.2",
30 | "@mui/lab": "^5.0.0-alpha.140",
31 | "@mui/material": "^5.14.5",
32 | "@tensorflow/tfjs": "^4.10.0",
33 | "@tensorflow/tfjs-backend-wasm": "^4.10.0",
34 | "@tensorflow/tfjs-node": "3.18.0",
35 | "@types/formidable": "^3.4.1",
36 | "@types/node": "20.5.0",
37 | "@types/react": "18.2.20",
38 | "@types/react-dom": "18.2.7",
39 | "@vladmandic/face-api": "^1.7.12",
40 | "axios": "^1.4.0",
41 | "dialog-node": "^0.2.1",
42 | "eslint": "8.47.0",
43 | "eslint-config-next": "13.4.16",
44 | "eslint-config-prettier": "^9.0.0",
45 | "eslint-config-typescript": "^3.0.0",
46 | "eslint-config-xo": "^0.43.1",
47 | "eslint-plugin-import": "^2.28.0",
48 | "eslint-plugin-jest": "^27.2.3",
49 | "eslint-plugin-prettier": "^5.0.0",
50 | "eslint-plugin-unicorn": "^48.0.1",
51 | "eslint-plugin-unused-imports": "^3.0.0",
52 | "execa": "^7.2.0",
53 | "file-type": "^18.5.0",
54 | "formidable": "^3.5.0",
55 | "globby": "^13.2.2",
56 | "image-size": "^1.0.2",
57 | "lru-cache": "^10.0.1",
58 | "multer": "^1.4.5-lts.1",
59 | "nanoid": "^4.0.2",
60 | "next": "13.4.16",
61 | "next-connect": "^1.0.0",
62 | "node-abort-controller": "^3.1.1",
63 | "prettier": "^3.0.1",
64 | "react": "18.2.0",
65 | "react-dom": "18.2.0",
66 | "react-hook-form": "^7.45.4",
67 | "sharp": "^0.32.4",
68 | "typescript": "5.1.6",
69 | "unsplash-js": "^7.0.18"
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/pages/_app.tsx:
--------------------------------------------------------------------------------
1 | import { CssBaseline } from "@mui/joy";
2 | import { CssVarsProvider, extendTheme } from "@mui/joy/styles";
3 | import type { AppProps } from "next/app";
4 | import Head from "next/head";
5 |
6 | const theme = extendTheme({
7 | colorSchemes: {
8 | light: {
9 | palette: {},
10 | },
11 | dark: {
12 | palette: {},
13 | },
14 | },
15 | });
16 |
17 | function MyApp({ Component, pageProps }: AppProps) {
18 | return (
19 |
20 |
21 |
22 | Lorado by failfa.st
23 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 | );
39 | }
40 |
41 | export default MyApp;
42 |
--------------------------------------------------------------------------------
/pages/_document.tsx:
--------------------------------------------------------------------------------
1 | import { getInitColorSchemeScript } from "@mui/joy/styles";
2 | import Document, { Head, Html, Main, NextScript } from "next/document";
3 |
4 | export default class MyDocument extends Document {
5 | render() {
6 | return (
7 | <Html>
8 | <Head />
9 | <body>
10 | {getInitColorSchemeScript()}
11 | <Main />
12 | <NextScript />
13 | </body>
14 | </Html>
15 | );
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/pages/api/image/delete/index.ts:
--------------------------------------------------------------------------------
1 | import fs from "node:fs/promises";
2 |
3 | import type { NextApiRequest, NextApiResponse } from "next";
4 |
5 | export default async function uploadImageHandler(
6 | request: NextApiRequest,
7 | response: NextApiResponse
8 | ) {
9 | switch (request.method) {
10 | case "POST":
11 | try {
12 | await fs.rm(request.body.outputPath);
13 | await fs.rm(request.body.captionPath);
14 | response.status(202).json({ message: "accepted" });
15 | } catch (error) {
16 | console.log(error);
17 | // TODO handle error correctly
18 | response.status(500).json({ message: "Server error" });
19 | }
20 |
21 | break;
22 | default:
23 | response.status(405).json({ message: "Method not allowed" });
24 | break;
25 | }
26 | }
27 |
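28 | /*
29 | Usage sketch (client side): the endpoint expects the generated file paths in
30 | the POST body, e.g.
31 |
32 | await axios.post("/api/image/delete", { outputPath, captionPath });
33 | */
34 |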
--------------------------------------------------------------------------------
/pages/api/image/edit.ts:
--------------------------------------------------------------------------------
1 | import fs from "node:fs/promises";
2 |
3 | import type { NextApiRequest, NextApiResponse } from "next";
4 |
5 | export default async function uploadImageHandler(
6 | request: NextApiRequest,
7 | response: NextApiResponse
8 | ) {
9 | switch (request.method) {
10 | case "POST":
11 | try {
12 | await fs.writeFile(request.body.captionPath, request.body.alt);
13 | response.status(201).json({ message: "success" });
14 | } catch (error) {
15 | console.log(error);
16 | // TODO handle error correctly
17 | response.status(500).json({ message: "Server error" });
18 | }
19 |
20 | break;
21 | default:
22 | response.status(405).json({ message: "Method not allowed" });
23 | break;
24 | }
25 | }
26 |
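27 | /*
28 | Usage sketch (client side): overwrite a caption file with edited text, e.g.
29 |
30 | await axios.post("/api/image/edit", { captionPath, alt: "ohwx woman, smiling" });
31 | */
32 |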
--------------------------------------------------------------------------------
/pages/api/image/upload.ts:
--------------------------------------------------------------------------------
1 | import fs from "node:fs/promises";
2 | import path from "path";
3 |
4 | import { IncomingForm } from "formidable";
5 | import type { NextApiRequest, NextApiResponse } from "next";
6 |
7 | import { prepareImage } from "@/services/prepare/prepare";
8 | import { sizes } from "@/services/prepare/sizes";
9 | import { ensureDirExists } from "@/services/prepare/utils";
10 |
11 | export const config = {
12 | api: {
13 | bodyParser: false,
14 | },
15 | };
16 |
17 | export default async function uploadImageHandler(
18 | request: NextApiRequest,
19 | response: NextApiResponse
20 | ) {
21 | switch (request.method) {
22 | case "POST":
23 | try {
24 | const form = new IncomingForm();
25 |
26 | form.parse(request, async (err, fields, files) => {
27 | if (err) {
28 | response.status(400).json({ message: "Error processing form data." });
29 | return;
30 | }
31 |
32 | const [baseDir] = fields.baseDir;
33 | const [filename] = fields.filename;
34 | const [className] = fields.className;
35 | const [subject] = fields.subject;
36 | const [caption] = fields.caption;
37 | const crop = fields.crop[0] === "true";
38 | const sdxl = fields.sdxl[0] === "true";
39 | const counter = Number.parseInt(fields.counter[0], 10);
40 | const repeats = Number.parseInt(fields.repeats[0], 10);
41 | const datasetDir = path.join(baseDir, "dataset");
42 | // Ensure directories exist
43 | await ensureDirExists(datasetDir);
44 |
45 | const filePromises = [];
46 |
47 | for (const [, fileArray] of Object.entries(files)) {
48 | if (Array.isArray(fileArray) && fileArray.length > 0) {
49 | for (const file of fileArray) {
50 | if (file.filepath) {
51 | const oldPath = file.filepath;
52 | const newPath = path.join(datasetDir, `${filename}.jpg`);
53 | const captionPath = path.join(datasetDir, `${filename}.txt`);
54 | const prepare = async () => {
55 | await fs.writeFile(captionPath, caption);
56 | await fs.copyFile(oldPath, newPath);
57 | await fs.unlink(oldPath);
58 | return prepareImage({
59 | image: newPath,
60 | counter,
61 | sizes,
62 | crop,
63 | zoomLevels: [0],
64 | repeats,
65 | className,
66 | subject,
67 | sdxl,
68 | outDir: baseDir,
69 | });
70 | };
71 |
72 | filePromises.push(prepare());
73 | } else {
74 | console.error("Unexpected file object without filepath:", file);
75 | throw new Error("Unexpected file object without filepath:");
76 | }
77 | }
78 | } else {
79 | console.error("Unexpected file structure detected:", fileArray);
80 | throw new Error("Unexpected file structure detected");
81 | }
82 | }
83 |
84 | const urls = await Promise.all(filePromises);
85 |
86 | response.status(201).json({
87 | message: "Success",
88 | caption,
89 | croppedFiles: urls.flat(),
90 | datasetFile: `/api/uploads/${
91 | datasetDir.split("training")[1]
92 | }/${filename}.jpg`
93 | .replaceAll("\\", "/")
94 | .replace(/\/+/g, "/"),
95 | });
96 | });
97 | } catch (error) {
98 | console.log(error);
99 | // TODO handle error correctly
100 | response.status(500).json({ message: "Server error" });
101 | }
102 |
103 | break;
104 | default:
105 | response.status(405).json({ message: "Method not allowed" });
106 | break;
107 | }
108 | }
109 |
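110 | /*
111 | Usage sketch (client side, illustrative values): the handler expects multipart
112 | form data with one file plus string fields; see pages/index.tsx for the real call.
113 |
114 | const body = new FormData();
115 | body.append("file", blob, "photo.jpg");
116 | body.append("baseDir", baseDir);
117 | body.append("filename", "ohwx--0001");
118 | body.append("subject", "ohwx");
119 | body.append("className", "woman");
120 | body.append("caption", "ohwx woman");
121 | body.append("crop", "true");
122 | body.append("sdxl", "true");
123 | body.append("counter", "1");
124 | body.append("repeats", "10");
125 | await axios.post("/api/image/upload", body);
126 | */
127 |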
--------------------------------------------------------------------------------
/pages/api/prepare.ts:
--------------------------------------------------------------------------------
1 | import fs from "node:fs/promises";
2 | import path from "path";
3 |
4 | import type { NextApiRequest, NextApiResponse } from "next";
5 |
6 | import { kohyaConfig } from "@/data/kohyaConfig";
7 | import { ensureDirExists } from "@/services/prepare/utils";
8 | import { createSamplePrompt } from "@/utils/samples";
9 |
10 | export default async function prepareDataHandler(
11 | request: NextApiRequest,
12 | response: NextApiResponse
13 | ) {
14 | switch (request.method) {
15 | case "POST":
16 | try {
17 | const baseDir = path.join(process.cwd(), "training", request.body.projectName);
18 | await ensureDirExists(baseDir);
19 |
20 | const {
21 | subject,
22 | className,
23 | epochs,
24 | lowVRAM,
25 | sdxl,
26 | sample,
27 | filename,
28 | checkpoint,
29 | regularisation,
30 | } = request.body;
31 | const networkDimension = sdxl ? 256 : 64;
32 |
33 | /* eslint-disable camelcase */
34 | const configContent = {
35 | ...kohyaConfig,
36 | epoch: epochs,
37 | output_name: filename,
38 | network_dim: lowVRAM ? 32 : networkDimension,
39 | sample_prompts: sample
40 | ? createSamplePrompt(subject, className, {
41 | real: true,
42 | sdxl: Boolean(sdxl),
43 | })
44 | : "",
45 | logging_dir: path.join(baseDir, "log"),
46 | reg_data_dir: regularisation ? path.join(baseDir, "reg") : "",
47 | output_dir: path.join(baseDir, "model"),
48 | train_data_dir: path.join(baseDir, "img"),
49 | sample_every_n_steps: sample ? 40 : 0,
50 | pretrained_model_name_or_path: checkpoint,
51 | sdxl: Boolean(sdxl),
52 | mixed_precision: lowVRAM ? "fp16" : "bf16",
53 | save_precision: lowVRAM ? "fp16" : "bf16",
54 | optimizer: lowVRAM ? "AdamW" : "Adafactor",
55 | optimizer_args: lowVRAM
56 | ? ""
57 | : "scale_parameter=False relative_step=False warmup_init=False",
58 | };
59 | /* eslint-enable camelcase */
60 |
61 | await fs.writeFile(
62 | path.join(baseDir, "config.json"),
63 | JSON.stringify(configContent, null, 2)
64 | );
65 |
66 | response.status(201).json({ message: "Success", baseDir });
67 | } catch (error) {
68 | console.log(error);
69 | // TODO handle error correctly
70 | response.status(500).json({ message: "Server error" });
71 | }
72 |
73 | break;
74 | default:
75 | response.status(405).json({ message: "Method not allowed" });
76 | break;
77 | }
78 | }
79 |
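80 | /*
81 | Usage sketch (client side, illustrative values): POST the form values; the
82 | handler writes training/<projectName>/config.json and returns the base directory.
83 |
84 | const { data } = await axios.post("/api/prepare", {
85 | projectName: "my_project",
86 | subject: "ohwx",
87 | className: "woman",
88 | epochs: 5,
89 | sdxl: true,
90 | sample: false,
91 | lowVRAM: false,
92 | regularisation: false,
93 | filename: "my_lora",
94 | checkpoint: "/path/to/checkpoint.safetensors",
95 | });
96 | // data.baseDir -> ".../training/my_project"
97 | */
98 |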
--------------------------------------------------------------------------------
/pages/api/projects/index.ts:
--------------------------------------------------------------------------------
1 | import { readdir, stat } from "node:fs/promises";
2 | import path from "node:path";
3 | import process from "node:process";
4 |
5 | import type { NextApiRequest, NextApiResponse } from "next";
6 | export async function getDirectories(
7 | parentDirectory: string
8 | ): Promise<{ fullPath: string; id: string }[]> {
9 | try {
10 | const filesAndDirs = await readdir(parentDirectory);
11 | const dirs: { fullPath: string; id: string }[] = [];
12 |
13 | for (const item of filesAndDirs) {
14 | const fullPath = path.join(parentDirectory, item);
15 | // eslint-disable-next-line no-await-in-loop
16 | if ((await stat(fullPath)).isDirectory()) {
17 | dirs.push({ fullPath, id: path.basename(fullPath) });
18 | }
19 | }
20 |
21 | return dirs;
22 | } catch (err) {
23 | console.error("Error reading directory:", err);
24 | return [];
25 | }
26 | }
27 |
28 | export default async function handler(request: NextApiRequest, response: NextApiResponse) {
29 | switch (request.method) {
30 | case "GET":
31 | try {
32 | const directories = await getDirectories(path.join(process.cwd(), "training"));
33 | response.status(200).send({ directories });
34 | } catch (error) {
35 | if (error instanceof Error) {
36 | response.status(500).send({ message: error.message });
37 | } else {
38 | response.status(500).send({ message: "An unexpected error occurred." });
39 | }
40 | }
41 |
42 | break;
43 | default:
44 | response.setHeader("Allow", ["GET"]);
45 | response.status(405).send({ message: "Method Not Allowed." });
46 | }
47 | }
48 |
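49 | /*
50 | Response sketch: GET /api/projects returns
51 | { directories: [{ fullPath: "<absolute path>", id: "<folder name>" }] }.
52 | */
53 |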
--------------------------------------------------------------------------------
/pages/api/uploads/[...args].ts:
--------------------------------------------------------------------------------
1 | import { createReadStream } from "node:fs";
2 | import path from "node:path";
3 |
4 | import type { NextApiRequest, NextApiResponse } from "next";
5 | import sharp from "sharp";
6 |
7 | // Streams a file through sharp, re-encoding it as an optimized JPEG.
8 | // file-type sniffing is unnecessary here: sharp fixes the output format.
9 | async function streamOptimizedImage(filePath: string, response: NextApiResponse) {
10 | const readStream = createReadStream(filePath);
11 |
12 | // Optimize the image on the fly
13 | const transformer = sharp().resize(1080).jpeg({ quality: 80 });
14 |
15 | // The transformer always outputs JPEG, so the Content-Type is known up front;
16 | // headers must be set before the stream is piped into the response, not after.
17 | response.setHeader("Content-Type", "image/jpeg");
18 |
19 | readStream.pipe(transformer).pipe(response);
20 | }
21 |
22 | export default async function handler(request: NextApiRequest, response: NextApiResponse) {
23 | switch (request.method) {
24 | case "GET":
25 | try {
26 | const args = request.query.args as string[];
27 | const filePath = path.join(process.cwd(), "training", ...args);
28 |
29 | // Stream and optimize the image directly
30 | await streamOptimizedImage(filePath, response);
31 | } catch (error) {
32 | response.status(500).send({ message: "An unexpected error occurred." });
33 | }
34 |
35 | break;
36 |
37 | default:
38 | response.setHeader("Allow", ["GET"]);
39 | response.status(405).send({ message: "Method Not Allowed." });
40 | }
41 | }
42 |
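43 | /*
44 | Usage sketch: GET /api/uploads/<project>/img/<folder>/<file>.png resolves the
45 | path inside the local "training" directory and streams an optimized JPEG.
46 | */
47 |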
--------------------------------------------------------------------------------
/pages/index.tsx:
--------------------------------------------------------------------------------
1 | import path from "node:path";
2 |
3 | import {
4 | Alert,
5 | Box,
6 | Button,
7 | Chip,
8 | FormControl,
9 | FormHelperText,
10 | FormLabel,
11 | Grid,
12 | Input,
13 | Stack,
14 | Switch,
15 | Typography,
16 | } from "@mui/joy";
17 | import type { AxiosError } from "axios";
18 | import axios from "axios";
19 | import { useRouter } from "next/router";
20 | import type { DragEvent } from "react";
21 | import { useState } from "react";
22 | import { Controller, useForm } from "react-hook-form";
23 |
24 | import { getDirectories } from "./api/projects";
25 |
26 | import FileUpload from "@/components/FileUpload";
27 | import ImageItem from "@/components/ImageItem";
28 | import Layout from "@/components/Layout";
29 | import Masonry from "@/components/Masonry";
30 | import SlideshowModal from "@/components/SlideshowModal";
31 | import SyncedSliderInput from "@/components/SynchedSliderInput";
32 | import { exampleImages } from "@/data/exampleImages";
33 | import type { FaceBox, FormDataModel, ImageData, ImageUpload } from "@/types";
34 | import { traverseFileTree } from "@/utils/traverseFileTree";
35 |
36 | export default function Home({ directories }: { directories: { fullPath: string; id: string }[] }) {
37 | const {
38 | formState: { errors },
39 | register,
40 | handleSubmit,
41 | control,
42 | setValue,
43 | watch,
44 | } = useForm<FormDataModel>({
45 | defaultValues: {
46 | projectName: "",
47 | sdxl: true,
48 | filename: "",
49 | checkpoint: "",
50 | subject: "",
51 | className: "",
52 | epochs: 5,
53 | crop: false,
54 | sample: false,
55 | lowVRAM: false,
56 | regularisation: false,
57 | files: [],
58 | },
59 | });
60 | const { push } = useRouter();
61 | const [loading, setLoading] = useState(false);
62 | const [error, setError] = useState<Error | AxiosError | null>(null);
63 | const [images, setImages] = useState<ImageData[]>([]);
64 | const [isModalOpen, setModalOpen] = useState(false);
65 | const [currentImageIndex, setCurrentImageIndex] = useState<number | null>(null);
66 |
67 | const openModal = (index: number) => {
68 | setCurrentImageIndex(index);
69 | setModalOpen(true);
70 | };
71 |
72 | const closeModal = () => {
73 | setCurrentImageIndex(null);
74 | setModalOpen(false);
75 | };
76 |
77 | const nextImage = () => {
78 | setCurrentImageIndex(prevIndex => {
79 | if (prevIndex === null) {
80 | return 0; // Initialize to the first image if currently null
81 | }
82 |
83 | // If we're at the last index, go back to the first one (0).
84 | return prevIndex >= images.length - 1 ? 0 : prevIndex + 1;
85 | });
86 | };
87 |
88 | const prevImage = () => {
89 | setCurrentImageIndex(prevIndex => {
90 | if (prevIndex === null) {
91 | return images.length - 1; // Initialize to the last image if currently null
92 | }
93 |
94 | // If we're at the first index, go to the last one.
95 | return prevIndex === 0 ? images.length - 1 : prevIndex - 1;
96 | });
97 | };
98 |
99 | const handleDrop = async (event: DragEvent) => {
100 | event.preventDefault();
101 |
102 | const { items } = event.dataTransfer;
103 |
104 | for (let i = 0; i < items.length; i++) {
105 | const item = items[i].webkitGetAsEntry();
106 |
107 | if (item) {
108 | // eslint-disable-next-line no-await-in-loop
109 | await traverseFileTree(item, imageData => {
110 | setImages(prev => [...prev, imageData]);
111 | });
112 | }
113 | }
114 | };
115 |
116 | const handleRemove = (imageIndex: number) => {
117 | setImages(prevImages => prevImages.filter((_, index) => index !== imageIndex));
118 | if (imageIndex === currentImageIndex) {
119 | prevImage();
120 | }
121 | };
122 |
123 | const handleCaptionChange = (imageIndex: number, value: string) => {
124 | setImages(prevImages =>
125 | prevImages.map((image, index) =>
126 | index === imageIndex ? { ...image, caption: value } : image
127 | )
128 | );
129 | };
130 |
131 | const handleFace = (faceBox: FaceBox, imageIndex: number) => {
132 | setImages(prevImages =>
133 | prevImages.map((image, index) => (index === imageIndex ? { ...image, faceBox } : image))
134 | );
135 | };
136 |
137 | const onSubmit = async (data: Omit<FormDataModel, "files">) => {
138 | setLoading(true);
139 | setError(null);
140 |
141 | try {
142 | const response = await axios.post<{ baseDir: string }>("/api/prepare", data);
143 |
144 | const repeats = Math.min(
145 | Math.max(Math.ceil(150 / (images.length * (data.crop ? 9 : 1))), 5),
146 | 50
147 | );
148 |
149 | const imagePromises = images.map(async (image, index) => {
150 | const counter = index + 1;
151 | const imageData = new FormData();
152 | const byteCharacters = atob(image.data!.split(",")[1]);
153 | const byteArrays = [];
154 |
155 | for (let offset = 0; offset < byteCharacters.length; offset += 512) {
156 | const slice = byteCharacters.slice(offset, offset + 512);
157 |
158 | const byteNumbers = new Array(slice.length);
159 | for (let i = 0; i < slice.length; i++) {
160 | byteNumbers[i] = slice.charCodeAt(i);
161 | }
162 |
163 | const byteArray = new Uint8Array(byteNumbers);
164 | byteArrays.push(byteArray);
165 | }
166 |
167 | const blob = new Blob(byteArrays, { type: "image/jpeg" });
168 | imageData.append("projectName", data.projectName);
169 | imageData.append("sdxl", data.sdxl.toString());
170 | imageData.append("file", blob, image.name);
171 | imageData.append("caption", image.caption ?? `${data.subject} ${data.className}`);
172 | imageData.append(
173 | "filename",
174 | `${data.subject}--${counter.toString().padStart(4, "0")}`
175 | );
176 | imageData.append("baseDir", response.data.baseDir);
177 | imageData.append("subject", data.subject);
178 | imageData.append("className", data.className);
179 | imageData.append("crop", data.crop.toString());
180 | imageData.append("counter", counter.toString());
181 | imageData.append("repeats", repeats.toString());
182 |
183 | // Now sending each image separately to a different endpoint
184 | const imageResponse = await axios.post<{ croppedFiles: ImageUpload[] }>(
185 | "/api/image/upload",
186 | imageData
187 | );
188 | setImages(prevState =>
189 | prevState.map(prevImage =>
190 | prevImage.id === image.id ? { ...prevImage, uploaded: true } : prevImage
191 | )
192 | );
193 | return imageResponse;
194 | });
195 |
196 | // Waiting for all image uploads to finish
197 | try {
198 | await Promise.all(imagePromises);
199 |
200 | push(`/projects/${data.projectName}`);
201 | } catch (error) {
202 | console.error(error);
203 | }
204 | } catch (error) {
205 | console.error("Error sending data: ", error);
206 | setError(error as Error | AxiosError);
207 | } finally {
208 | setLoading(false);
209 | }
210 | };
211 |
212 | const preferredLength = 8;
213 | const secondaryLength = 5;
214 | const isSDXL = watch("sdxl");
215 |
216 | return (
217 |
218 |
228 |
229 |
230 |
231 |
232 |
233 |
234 |
235 |
236 | {isSDXL ? "SDXL" : "SD 1.5"} Checkpoint
237 |
238 |
245 |
246 | Please enter the path to your checkpoint.
247 |
248 |
249 |
250 |
251 | (
258 |
263 | )}
264 | />
265 | }
266 | >
267 | SDXL
268 |
269 |
270 |
271 |
272 |
273 |
274 | Project Name
275 | (
280 |
285 | field.onChange({
286 | target: {
287 | value: event.target.value
288 | .toLowerCase()
289 | .replace(/\s+/g, "_")
290 | .replace(/_+/g, "_"),
291 | },
292 | })
293 | }
294 | />
295 | )}
296 | />
297 |
298 |
299 | Please enter the name of the project.
300 |
301 |
302 |
303 |
304 |
305 | LoRA Name
306 | (
311 |
316 | field.onChange({
317 | target: {
318 | value: event.target.value
319 | .toLowerCase()
320 | .replace(/\s+/g, "_")
321 | .replace(/_+/g, "_"),
322 | },
323 | })
324 | }
325 | />
326 | )}
327 | />
328 |
329 | Please enter the filename of the LoRA.
330 |
331 |
332 |
333 |
334 |
335 |
336 |
337 | Subject Name
338 |
343 |
344 | Please enter the name of the subject you want to train.
345 |
346 |
347 |
348 |
349 |
350 | Class Name
351 |
356 |
357 | Please enter the name of the class you want to train.
358 |
359 |
360 |
361 |
362 | (
367 | {
374 | setValue("epochs", value);
375 | field.onChange(value);
376 | }}
377 | />
378 | )}
379 | />
380 |
381 |
382 | (
389 |
394 | )}
395 | />
396 | }
397 | >
398 | Create crops
399 |
400 |
401 | Do you want to create SDXL resolution crop versions of your
402 | images?
403 |
404 |
405 |
406 | (
413 |
418 | )}
419 | />
420 | }
421 | >
422 | Create samples
423 |
424 |
425 | Do you want to create sample images during training?
426 | (slower)
427 |
428 |
429 |
430 |
431 |
432 | (
439 |
444 | )}
445 | />
446 | }
447 | >
448 | Low VRAM
449 |
450 |
451 | Optimize for low VRAM (below 16GB)? (less accurate)
452 |
453 |
454 |
455 | (
462 |
468 | )}
469 | />
470 | }
471 | >
472 | Regularisation
473 |
474 |
475 |
476 |
482 | WIP
483 |
484 | Do you want to use regularisation images during
485 | training? (more flexible but twice as slow)
486 |
487 |
488 |
489 |
490 |
491 |
499 |
500 |
501 | {error && (
502 |
503 | {error.message}
504 |
505 | )}
506 | = preferredLength
511 | ? "success"
512 | : images.length >= secondaryLength
513 | ? "warning"
514 | : "danger"
515 | /* eslint-enable no-nested-ternary */
516 | }
517 | size="sm"
518 | >
519 | Using {images.length} image{images.length === 1 ? "" : "s"}.
520 | {images.length < secondaryLength && images.length > 0 && (
521 |
522 | Fewer images will create less flexible LoRAs.
523 |
524 | )}
525 | {images.length < preferredLength && images.length >= secondaryLength && (
526 |
527 | We recommend at least {preferredLength} images.
528 |
529 | )}
530 | {images.length === 0 && (
531 | Please add images.
532 | )}
533 |
534 |
535 |
536 |
537 | {
543 | setImages(previousState => [...previousState, imageData]);
544 | }}
545 | />
546 |
547 |
548 |
549 |
550 |
559 |
560 |
561 | {images.map((image, index) => (
562 | {
566 | handleFace(faceBox, index);
567 | }}
568 | onRemove={() => {
569 | handleRemove(index);
570 | }}
571 | onOpen={() => {
572 | openModal(index);
573 | }}
574 | onCaptionChange={event => {
575 | handleCaptionChange(index, event.target.value);
576 | }}
577 | />
578 | ))}
579 |
580 | {images.length === 0 &&
581 | exampleImages.map(image => )}
582 |
583 |
584 | );
585 | }
586 |
587 | export async function getServerSideProps() {
588 | const directories = await getDirectories(path.join(process.cwd(), "training"));
589 | return {
590 | props: {
591 | directories,
592 | },
593 | };
594 | }
595 |
--------------------------------------------------------------------------------
/pages/projects/[id].tsx:
--------------------------------------------------------------------------------
1 | import fs from "node:fs/promises";
2 | import path from "node:path";
3 |
4 | import axios from "axios";
5 | import { globby } from "globby";
6 | import type { GetServerSidePropsContext } from "next";
7 | import { useEffect, useState } from "react";
8 |
9 | import { getDirectories } from "../api/projects";
10 |
11 | import Layout from "@/components/Layout";
12 | import { ProjectsTable } from "@/components/ProjectsTable";
13 | import SlideshowModal from "@/components/SlideshowModal";
14 | import { getImageDimensions } from "@/services/prepare/utils";
15 | import type { ImageUpload } from "@/types";
16 |
17 | export default function Project({
18 | directories,
19 | uploads: uploads_,
20 | }: {
21 | directories: { fullPath: string; id: string }[];
22 | uploads: ImageUpload[];
23 | }) {
24 | const [uploads, setUploads] = useState(uploads_);
25 | const [isModalOpen, setModalOpen] = useState(false);
26 | const [currentImageIndex, setCurrentImageIndex] = useState<number | null>(null);
27 |
28 | const openModal = (index: number) => {
29 | setCurrentImageIndex(index);
30 | setModalOpen(true);
31 | };
32 |
33 | const closeModal = () => {
34 | setCurrentImageIndex(null);
35 | setModalOpen(false);
36 | };
37 |
38 | const nextImage = () => {
39 | setCurrentImageIndex(prevIndex => {
40 | if (prevIndex === null) {
41 | return 0; // Initialize to the first image if currently null
42 | }
43 |
44 | // If we're at the last index, go back to the first one (0).
45 | return prevIndex >= uploads.length - 1 ? 0 : prevIndex + 1;
46 | });
47 | };
48 |
49 | const prevImage = () => {
50 | setCurrentImageIndex(prevIndex => {
51 | if (prevIndex === null) {
52 | return uploads.length - 1; // Initialize to the last image if currently null
53 | }
54 |
55 | // If we're at the first index, go to the last one.
56 | return prevIndex === 0 ? uploads.length - 1 : prevIndex - 1;
57 | });
58 | };
59 |
60 | useEffect(() => {
61 | setUploads(previousUploads => (previousUploads === uploads_ ? previousUploads : uploads_));
62 | }, [uploads_]);
63 |
64 | return (
65 |
66 | ({
68 | id: image.src,
69 | name: image.src.split("/").pop()!,
70 | data: image.src,
71 | src: image.src,
72 | caption: image.alt,
73 | height: image.height,
74 | width: image.width,
75 | size: 0,
76 | }))}
77 | currentIndex={currentImageIndex === null ? 0 : currentImageIndex}
78 | isOpen={isModalOpen}
79 | onClose={closeModal}
80 | onNext={nextImage}
81 | onPrev={prevImage}
82 | onCaptionChange={(index, value) => {
83 | setUploads(previousState =>
84 | previousState.map((image, imageIndex) =>
85 | imageIndex === index
86 | ? {
87 | ...image,
88 | alt: value,
89 | modified: true,
90 | }
91 | : image
92 | )
93 | );
94 | }}
95 | onDelete={async index => {
96 | const image = uploads[index];
97 | try {
98 | await axios.post(`/api/image/delete`, image);
99 | setUploads(previousState =>
100 | previousState.filter((_, imageIndex) => imageIndex !== index)
101 | );
102 | } catch (error) {
103 | console.log(error);
104 | }
105 | }}
106 | />
107 |
108 | {
112 | setUploads(previousState =>
113 | previousState.map(image =>
114 | image.src === id
115 | ? {
116 | ...image,
117 | alt: value,
118 | modified: true,
119 | }
120 | : image
121 | )
122 | );
123 | }}
124 | onRemove={async image => {
125 | try {
126 | await axios.post(`/api/image/delete`, image);
127 | setUploads(previousState =>
128 | previousState.filter(({ src }) => src !== image.src)
129 | );
130 | } catch (error) {
131 | console.log(error);
132 | }
133 | }}
134 | onSave={async image => {
135 | try {
136 | await axios.post(`/api/image/edit`, image);
137 | setUploads(previousState =>
138 | previousState.map(image_ =>
139 | image_.src === image.src ? { ...image_, modified: false } : image_
140 | )
141 | );
142 | } catch (error) {
143 | console.log(error);
144 | }
145 | }}
146 | />
147 |
148 | );
149 | }
150 |
151 | export async function getServerSideProps(context: GetServerSidePropsContext) {
152 | const { id } = context.query;
153 | const directories = await getDirectories(path.join(process.cwd(), "training"));
154 | const project = directories.find(directory => directory.id === id);
155 | const directory = project ? path.join(project.fullPath, "img") : null;
156 | const uploadFiles = directory
157 | ? await globby(["*.png", "*/*.png"], { cwd: directory, gitignore: false })
158 | : [];
159 | const uploads = await Promise.all(
160 | uploadFiles.map(async filePath => {
161 | const outputPath = directory ? path.join(directory, filePath) : "";
162 | const { height, width } = await getImageDimensions(outputPath);
163 | let alt = "";
164 | let hasCaption = false;
165 | const captionPath = outputPath.replace(/\.png$/, ".txt");
166 | try {
167 | alt = await fs.readFile(captionPath, "utf-8");
168 | hasCaption = true;
169 | } catch (error) {
170 | console.log("no caption file found for", outputPath);
171 | }
172 |
173 | const src = `/api/uploads/${outputPath.split("training")[1]}`
174 | .replaceAll("\\", "/")
175 | .replace(/\/+/g, "/");
176 | return {
177 | height,
178 | width,
179 | src,
180 | alt,
181 | outputPath,
182 | captionPath: hasCaption ? captionPath : outputPath.replace(/\.png$/, ".txt"),
183 | };
184 | })
185 | );
186 | return {
187 | props: {
188 | directories,
189 | uploads,
190 | },
191 | };
192 | }
193 |
--------------------------------------------------------------------------------
/pages/test/face-detect.tsx:
--------------------------------------------------------------------------------
1 | import { Box } from "@mui/joy";
2 | import dynamic from "next/dynamic";
3 |
4 | import img from "../../public/images/anamnesis33/example (1).jpg";
5 |
6 | const FaceDetectionImage = dynamic(() => import("@/components/FaceDetectionImage"), {
7 | ssr: false,
8 | });
9 | export default function Page() {
10 | return (
11 |
12 |
13 |
14 | );
15 | }
16 |
--------------------------------------------------------------------------------
/public/face-api/models/age_gender_model-weights_manifest.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "weights":
4 | [
5 | {"name":"entry_flow/conv_in/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005431825039433498,"min":-0.7441600304023892}},
6 | {"name":"entry_flow/conv_in/bias","shape":[32],"dtype":"float32"},
7 | {"name":"entry_flow/reduction_block_0/separable_conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005691980614381678,"min":-0.6090419257388395}},
8 | {"name":"entry_flow/reduction_block_0/separable_conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009089225881239947,"min":-1.1179747833925135}},
9 | {"name":"entry_flow/reduction_block_0/separable_conv0/bias","shape":[64],"dtype":"float32"},
10 | {"name":"entry_flow/reduction_block_0/separable_conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00683894624897078,"min":-0.8138346036275228}},
11 | {"name":"entry_flow/reduction_block_0/separable_conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011632566358528886,"min":-1.3028474321552352}},
12 | {"name":"entry_flow/reduction_block_0/separable_conv1/bias","shape":[64],"dtype":"float32"},
13 | {"name":"entry_flow/reduction_block_0/expansion_conv/filters","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010254812240600587,"min":-0.9229331016540528}},
14 | {"name":"entry_flow/reduction_block_0/expansion_conv/bias","shape":[64],"dtype":"float32"},
15 | {"name":"entry_flow/reduction_block_1/separable_conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0052509616403018725,"min":-0.6406173201168285}},
16 | {"name":"entry_flow/reduction_block_1/separable_conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010788509424994973,"min":-1.4564487723743214}},
17 | {"name":"entry_flow/reduction_block_1/separable_conv0/bias","shape":[128],"dtype":"float32"},
18 | {"name":"entry_flow/reduction_block_1/separable_conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00553213918910307,"min":-0.7025816770160899}},
19 | {"name":"entry_flow/reduction_block_1/separable_conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013602388606351965,"min":-1.6186842441558837}},
20 | {"name":"entry_flow/reduction_block_1/separable_conv1/bias","shape":[128],"dtype":"float32"},
21 | {"name":"entry_flow/reduction_block_1/expansion_conv/filters","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007571851038465313,"min":-1.158493208885193}},
22 | {"name":"entry_flow/reduction_block_1/expansion_conv/bias","shape":[128],"dtype":"float32"},
23 | {"name":"middle_flow/main_block_0/separable_conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005766328409606335,"min":-0.6688940955143349}},
24 | {"name":"middle_flow/main_block_0/separable_conv0/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.012136116214826995,"min":-1.5776951079275094}},
25 | {"name":"middle_flow/main_block_0/separable_conv0/bias","shape":[128],"dtype":"float32"},
26 | {"name":"middle_flow/main_block_0/separable_conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004314773222979377,"min":-0.5652352922102984}},
27 | {"name":"middle_flow/main_block_0/separable_conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01107162026798024,"min":-1.2400214700137868}},
28 | {"name":"middle_flow/main_block_0/separable_conv1/bias","shape":[128],"dtype":"float32"},
29 | {"name":"middle_flow/main_block_0/separable_conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036451735917259667,"min":-0.4848080876995536}},
30 | {"name":"middle_flow/main_block_0/separable_conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008791744942758598,"min":-1.134135097615859}},
31 | {"name":"middle_flow/main_block_0/separable_conv2/bias","shape":[128],"dtype":"float32"},
32 | {"name":"middle_flow/main_block_1/separable_conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004915751896652521,"min":-0.6095532351849126}},
33 | {"name":"middle_flow/main_block_1/separable_conv0/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010868691463096469,"min":-1.3368490499608656}},
34 | {"name":"middle_flow/main_block_1/separable_conv0/bias","shape":[128],"dtype":"float32"},
35 | {"name":"middle_flow/main_block_1/separable_conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005010117269029804,"min":-0.6012140722835765}},
36 | {"name":"middle_flow/main_block_1/separable_conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010311148213405235,"min":-1.3816938605963016}},
37 | {"name":"middle_flow/main_block_1/separable_conv1/bias","shape":[128],"dtype":"float32"},
38 | {"name":"middle_flow/main_block_1/separable_conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004911523706772748,"min":-0.7367285560159123}},
39 | {"name":"middle_flow/main_block_1/separable_conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008976466047997568,"min":-1.2207993825276693}},
40 | {"name":"middle_flow/main_block_1/separable_conv2/bias","shape":[128],"dtype":"float32"},
41 | {"name":"exit_flow/reduction_block/separable_conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005074804436926748,"min":-0.7104726211697447}},
42 | {"name":"exit_flow/reduction_block/separable_conv0/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011453078307357489,"min":-1.4545409450344011}},
43 | {"name":"exit_flow/reduction_block/separable_conv0/bias","shape":[256],"dtype":"float32"},
44 | {"name":"exit_flow/reduction_block/separable_conv1/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007741751390344957,"min":-1.1380374543807086}},
45 | {"name":"exit_flow/reduction_block/separable_conv1/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011347713189966538,"min":-1.497898141075583}},
46 | {"name":"exit_flow/reduction_block/separable_conv1/bias","shape":[256],"dtype":"float32"},
47 | {"name":"exit_flow/reduction_block/expansion_conv/filters","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006717281014311547,"min":-0.8329428457746318}},
48 | {"name":"exit_flow/reduction_block/expansion_conv/bias","shape":[256],"dtype":"float32"},
49 | {"name":"exit_flow/separable_conv/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0027201742518181892,"min":-0.3237007359663645}},
50 | {"name":"exit_flow/separable_conv/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010076364348916447,"min":-1.330080094056971}},
51 | {"name":"exit_flow/separable_conv/bias","shape":[512],"dtype":"float32"},
52 | {"name":"fc/age/weights","shape":[512,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008674054987290326,"min":-1.2664120281443876}},
53 | {"name":"fc/age/bias","shape":[1],"dtype":"float32"},
54 | {"name":"fc/gender/weights","shape":[512,2],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029948226377075793,"min":-0.34140978069866407}},
55 | {"name":"fc/gender/bias","shape":[2],"dtype":"float32"}
56 | ],
57 | "paths":
58 | [
59 | "age_gender_model.bin"
60 | ]
61 | }
62 | ]
--------------------------------------------------------------------------------
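Each entry in these manifests describes one tensor: uint8-quantized tensors carry a scale and min so the loader can reconstruct float32 values as quantized * scale + min, while entries without a quantization block are stored as raw float32 in the companion .bin file. A minimal TypeScript sketch of that dequantization step (the types and helper name here are illustrative, not part of this repo):

interface Quantization {
	dtype: "uint8";
	scale: number;
	min: number;
}

interface WeightEntry {
	name: string;
	shape: number[];
	dtype: string;
	quantization?: Quantization;
}

// Expand a uint8-quantized byte buffer back into float32 weights.
function dequantize(bytes: Uint8Array, q: Quantization): Float32Array {
	const out = new Float32Array(bytes.length);
	for (let i = 0; i < bytes.length; i++) {
		out[i] = bytes[i] * q.scale + q.min; // float ≈ quantized * scale + min
	}
	return out;
}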
/public/face-api/models/age_gender_model.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/face-api/models/age_gender_model.bin
--------------------------------------------------------------------------------
/public/face-api/models/face_expression_model-weights_manifest.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "weights":
4 | [
5 | {"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0057930146946626555,"min":-0.7125408074435067}},
6 | {"name":"dense0/conv0/bias","shape":[32],"dtype":"float32"},
7 | {"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006473719839956246,"min":-0.6408982641556684}},
8 | {"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010509579321917366,"min":-1.408283629136927}},
9 | {"name":"dense0/conv1/bias","shape":[32],"dtype":"float32"},
10 | {"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005666389652326995,"min":-0.7252978754978554}},
11 | {"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010316079270605948,"min":-1.1760330368490781}},
12 | {"name":"dense0/conv2/bias","shape":[32],"dtype":"float32"},
13 | {"name":"dense0/conv3/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0063220320963392074,"min":-0.853474333005793}},
14 | {"name":"dense0/conv3/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010322785377502442,"min":-1.4658355236053466}},
15 | {"name":"dense0/conv3/bias","shape":[32],"dtype":"float32"},
16 | {"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0042531527724920535,"min":-0.5741756242864272}},
17 | {"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010653339647779278,"min":-1.1825207009035}},
18 | {"name":"dense1/conv0/bias","shape":[64],"dtype":"float32"},
19 | {"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005166931012097527,"min":-0.6355325144879957}},
20 | {"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011478300188101974,"min":-1.3888743227603388}},
21 | {"name":"dense1/conv1/bias","shape":[64],"dtype":"float32"},
22 | {"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006144821410085641,"min":-0.8479853545918185}},
23 | {"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010541967317169788,"min":-1.3809977185492421}},
24 | {"name":"dense1/conv2/bias","shape":[64],"dtype":"float32"},
25 | {"name":"dense1/conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005769844849904378,"min":-0.686611537138621}},
26 | {"name":"dense1/conv3/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010939095534530341,"min":-1.2689350820055196}},
27 | {"name":"dense1/conv3/bias","shape":[64],"dtype":"float32"},
28 | {"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0037769308277204924,"min":-0.40790852939381317}},
29 | {"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01188667194516051,"min":-1.4382873053644218}},
30 | {"name":"dense2/conv0/bias","shape":[128],"dtype":"float32"},
31 | {"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006497045825509464,"min":-0.8381189114907208}},
32 | {"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011632198913424622,"min":-1.3377028750438316}},
33 | {"name":"dense2/conv1/bias","shape":[128],"dtype":"float32"},
34 | {"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005947182225246056,"min":-0.7969224181829715}},
35 | {"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011436844339557722,"min":-1.4524792311238306}},
36 | {"name":"dense2/conv2/bias","shape":[128],"dtype":"float32"},
37 | {"name":"dense2/conv3/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006665432686899222,"min":-0.8998334127313949}},
38 | {"name":"dense2/conv3/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01283421422920975,"min":-1.642779421338848}},
39 | {"name":"dense2/conv3/bias","shape":[128],"dtype":"float32"},
40 | {"name":"dense3/conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004711699953266218,"min":-0.6737730933170692}},
41 | {"name":"dense3/conv0/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010955964817720302,"min":-1.3914075318504784}},
42 | {"name":"dense3/conv0/bias","shape":[256],"dtype":"float32"},
43 | {"name":"dense3/conv1/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00554193468654857,"min":-0.7149095745647656}},
44 | {"name":"dense3/conv1/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016790372250126858,"min":-2.484975093018775}},
45 | {"name":"dense3/conv1/bias","shape":[256],"dtype":"float32"},
46 | {"name":"dense3/conv2/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006361540626077091,"min":-0.8142772001378676}},
47 | {"name":"dense3/conv2/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01777329678628959,"min":-1.7062364914838006}},
48 | {"name":"dense3/conv2/bias","shape":[256],"dtype":"float32"},
49 | {"name":"dense3/conv3/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006900275922289082,"min":-0.8625344902861353}},
50 | {"name":"dense3/conv3/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015449936717164282,"min":-1.9003422162112067}},
51 | {"name":"dense3/conv3/bias","shape":[256],"dtype":"float32"},
52 | {"name":"fc/weights","shape":[256,7],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004834276554631252,"min":-0.7203072066400565}},
53 | {"name":"fc/bias","shape":[7],"dtype":"float32"}
54 | ],
55 | "paths":
56 | [
57 | "face_expression_model.bin"
58 | ]
59 | }
60 | ]
--------------------------------------------------------------------------------
/public/face-api/models/face_expression_model.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/face-api/models/face_expression_model.bin
--------------------------------------------------------------------------------
/public/face-api/models/face_landmark_68_model-weights_manifest.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "weights":
4 | [
5 | {"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004853619781194949,"min":-0.5872879935245888}},
6 | {"name":"dense0/conv0/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004396426443960153,"min":-0.7298067896973853}},
7 | {"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00635151559231328,"min":-0.5589333721235686}},
8 | {"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009354315552057004,"min":-1.2628325995276957}},
9 | {"name":"dense0/conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029380727048013726,"min":-0.5846764682554731}},
10 | {"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0049374802439820535,"min":-0.6171850304977566}},
11 | {"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009941946758943446,"min":-1.3421628124573652}},
12 | {"name":"dense0/conv2/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0030300481062309416,"min":-0.5272283704841838}},
13 | {"name":"dense0/conv3/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005672684837790097,"min":-0.7431217137505026}},
14 | {"name":"dense0/conv3/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010712201455060173,"min":-1.5639814124387852}},
15 | {"name":"dense0/conv3/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0030966934035806097,"min":-0.3839899820439956}},
16 | {"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0039155554537679636,"min":-0.48161332081345953}},
17 | {"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01023082966898002,"min":-1.094698774580862}},
18 | {"name":"dense1/conv0/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0027264176630506327,"min":-0.3871513081531898}},
19 | {"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004583378632863362,"min":-0.5454220573107401}},
20 | {"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00915846403907327,"min":-1.117332612766939}},
21 | {"name":"dense1/conv1/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003091680419211294,"min":-0.5966943209077797}},
22 | {"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005407439727409214,"min":-0.708374604290607}},
23 | {"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00946493943532308,"min":-1.2399070660273235}},
24 | {"name":"dense1/conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004409168514550901,"min":-0.9788354102303}},
25 | {"name":"dense1/conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004478132958505668,"min":-0.6493292789833219}},
26 | {"name":"dense1/conv3/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011063695888893277,"min":-1.2501976354449402}},
27 | {"name":"dense1/conv3/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003909627596537272,"min":-0.6646366914113363}},
28 | {"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003213915404151468,"min":-0.3374611174359041}},
29 | {"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010917326048308728,"min":-1.4520043644250609}},
30 | {"name":"dense2/conv0/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002800439152063108,"min":-0.38085972468058266}},
31 | {"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0050568851770139206,"min":-0.6927932692509071}},
32 | {"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01074961213504567,"min":-1.3222022926106174}},
33 | {"name":"dense2/conv1/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0030654204242369708,"min":-0.5487102559384177}},
34 | {"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00591809165244009,"min":-0.917304206128214}},
35 | {"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01092823346455892,"min":-1.366029183069865}},
36 | {"name":"dense2/conv2/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002681120470458386,"min":-0.36463238398234055}},
37 | {"name":"dense2/conv3/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0048311497650894465,"min":-0.5797379718107336}},
38 | {"name":"dense2/conv3/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011227761062921263,"min":-1.4483811771168429}},
39 | {"name":"dense2/conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0034643323982463162,"min":-0.3360402426298927}},
40 | {"name":"dense3/conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394978887894574,"min":-0.49227193874471326}},
41 | {"name":"dense3/conv0/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010051267287310432,"min":-1.2765109454884247}},
42 | {"name":"dense3/conv0/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003142924752889895,"min":-0.4588670139219247}},
43 | {"name":"dense3/conv1/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00448304671867221,"min":-0.5872791201460595}},
44 | {"name":"dense3/conv1/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016063522357566685,"min":-2.3613377865623026}},
45 | {"name":"dense3/conv1/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00287135781026354,"min":-0.47664539650374765}},
46 | {"name":"dense3/conv2/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006002906724518421,"min":-0.7923836876364315}},
47 | {"name":"dense3/conv2/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.017087187019048954,"min":-1.6061955797906016}},
48 | {"name":"dense3/conv2/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003124481205846749,"min":-0.46242321846531886}},
49 | {"name":"dense3/conv3/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006576311588287353,"min":-1.0193282961845398}},
50 | {"name":"dense3/conv3/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015590153955945782,"min":-1.99553970636106}},
51 | {"name":"dense3/conv3/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004453541601405424,"min":-0.6546706154065973}},
52 | {"name":"fc/weights","shape":[256,136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010417488509533453,"min":-1.500118345372817}},
53 | {"name":"fc/bias","shape":[136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0025084222648658005,"min":0.07683877646923065}}
54 | ],
55 | "paths":
56 | [
57 | "face_landmark_68_model.bin"
58 | ]
59 | }
60 | ]
--------------------------------------------------------------------------------
/public/face-api/models/face_landmark_68_model.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/face-api/models/face_landmark_68_model.bin
--------------------------------------------------------------------------------
/public/face-api/models/face_landmark_68_tiny_model-weights_manifest.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "weights":
4 | [
5 | {"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008194216092427571,"min":-0.9423348506291708}},
6 | {"name":"dense0/conv0/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006839508168837603,"min":-0.8412595047670252}},
7 | {"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009194007106855804,"min":-1.2779669878529567}},
8 | {"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036026100317637128,"min":-0.3170296827952067}},
9 | {"name":"dense0/conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000740380117706224,"min":-0.06367269012273527}},
10 | {"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},
11 | {"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},
12 | {"name":"dense0/conv2/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0037702228508743585,"min":-0.6220867703942692}},
13 | {"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0033707996209462483,"min":-0.421349952618281}},
14 | {"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014611541991140328,"min":-1.8556658328748217}},
15 | {"name":"dense1/conv0/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002832523046755323,"min":-0.30307996600281956}},
16 | {"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006593170586754294,"min":-0.6329443763284123}},
17 | {"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.012215249211180444,"min":-1.6001976466646382}},
18 | {"name":"dense1/conv1/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002384825547536214,"min":-0.3028728445370992}},
19 | {"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005859645441466687,"min":-0.7617539073906693}},
20 | {"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013121426806730382,"min":-1.7845140457153321}},
21 | {"name":"dense1/conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032247188044529336,"min":-0.46435950784122243}},
22 | {"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002659512618008782,"min":-0.32977956463308894}},
23 | {"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015499923743453681,"min":-1.9839902391620712}},
24 | {"name":"dense2/conv0/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032450980999890497,"min":-0.522460794098237}},
25 | {"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005911862382701799,"min":-0.792189559282041}},
26 | {"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021025861478319356,"min":-2.2077154552235325}},
27 | {"name":"dense2/conv1/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00349616945958605,"min":-0.46149436866535865}},
28 | {"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008104994250278847,"min":-1.013124281284856}},
29 | {"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029337059282789044,"min":-3.5791212325002633}},
30 | {"name":"dense2/conv2/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0038808938334969913,"min":-0.4230174278511721}},
31 | {"name":"fc/weights","shape":[128,136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014016061670639936,"min":-1.8921683255363912}},
32 | {"name":"fc/bias","shape":[136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029505149698724935,"min":0.088760145008564}}
33 | ],
34 | "paths":
35 | [
36 | "face_landmark_68_tiny_model.bin"
37 | ]
38 | }
39 | ]
--------------------------------------------------------------------------------
/public/face-api/models/face_landmark_68_tiny_model.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/face-api/models/face_landmark_68_tiny_model.bin
--------------------------------------------------------------------------------
/public/face-api/models/face_recognition_model-weights_manifest.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "weights":
4 | [
5 | {"name":"conv32_down/conv/filters","shape":[7,7,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0005260649557207145,"min":-0.07101876902229645}},
6 | {"name":"conv32_down/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":8.471445956577858e-7,"min":-0.00014740315964445472}},
7 | {"name":"conv32_down/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.06814416062598135,"min":5.788674831390381}},
8 | {"name":"conv32_down/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008471635042452345,"min":-0.931879854669758}},
9 | {"name":"conv32_1/conv1/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0007328585666768691,"min":-0.0974701893680236}},
10 | {"name":"conv32_1/conv1/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.5952091238361e-8,"min":-0.000001978059313556764}},
11 | {"name":"conv32_1/conv1/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.02146628510718252,"min":3.1103382110595703}},
12 | {"name":"conv32_1/conv1/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0194976619645661,"min":-2.3787147596770644}},
13 | {"name":"conv32_1/conv2/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0004114975824075587,"min":-0.05267169054816751}},
14 | {"name":"conv32_1/conv2/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.600177166424806e-9,"min":-5.70421968636676e-7}},
15 | {"name":"conv32_1/conv2/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03400764932819441,"min":2.1677730083465576}},
16 | {"name":"conv32_1/conv2/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010974494616190593,"min":-1.240117891629537}},
17 | {"name":"conv32_2/conv1/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0005358753251094444,"min":-0.0760942961655411}},
18 | {"name":"conv32_2/conv1/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":5.9886454383719385e-9,"min":-7.366033889197485e-7}},
19 | {"name":"conv32_2/conv1/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014633869657329485,"min":2.769575357437134}},
20 | {"name":"conv32_2/conv1/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.022131107367721257,"min":-2.5229462399202234}},
21 | {"name":"conv32_2/conv2/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00030145110452876373,"min":-0.03949009469326805}},
22 | {"name":"conv32_2/conv2/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":6.8779549306497095e-9,"min":-9.010120959151119e-7}},
23 | {"name":"conv32_2/conv2/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03929369870354148,"min":4.8010945320129395}},
24 | {"name":"conv32_2/conv2/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010553357180427103,"min":-1.2452961472903983}},
25 | {"name":"conv32_3/conv1/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0003133527642371608,"min":-0.040735859350830905}},
26 | {"name":"conv32_3/conv1/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.1064200719547974e-9,"min":-3.0387508532465503e-7}},
27 | {"name":"conv32_3/conv1/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009252088210161994,"min":2.333256721496582}},
28 | {"name":"conv32_3/conv1/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007104101251153385,"min":-0.34810096130651585}},
29 | {"name":"conv32_3/conv2/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00029995629892629733,"min":-0.031195455088334923}},
30 | {"name":"conv32_3/conv2/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":5.62726418316814e-9,"min":-6.921534945296811e-7}},
31 | {"name":"conv32_3/conv2/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0467432975769043,"min":5.362040996551514}},
32 | {"name":"conv32_3/conv2/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010314425300149357,"min":-1.268674311918371}},
33 | {"name":"conv64_down/conv1/conv/filters","shape":[3,3,32,64],"dtype":"float32"},
34 | {"name":"conv64_down/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":8.373908033218849e-10,"min":-1.172347124650639e-7}},
35 | {"name":"conv64_down/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0066875364266189875,"min":2.5088400840759277}},
36 | {"name":"conv64_down/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01691421620986041,"min":-2.0973628100226906}},
37 | {"name":"conv64_down/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},
38 | {"name":"conv64_down/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.3252014483766877e-9,"min":-2.673981665633191e-7}},
39 | {"name":"conv64_down/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.032557439804077146,"min":2.6351239681243896}},
40 | {"name":"conv64_down/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015429047509735706,"min":-1.5429047509735707}},
41 | {"name":"conv64_1/conv1/conv/filters","shape":[3,3,64,64],"dtype":"float32"},
42 | {"name":"conv64_1/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.1319172039756998e-9,"min":-1.4941307092479238e-7}},
43 | {"name":"conv64_1/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007802607031429515,"min":3.401733160018921}},
44 | {"name":"conv64_1/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01425027146058924,"min":-0.6982633015688727}},
45 | {"name":"conv64_1/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},
46 | {"name":"conv64_1/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.5635019893325435e-9,"min":-2.717312108692496e-7}},
47 | {"name":"conv64_1/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04062801716374416,"min":3.542381525039673}},
48 | {"name":"conv64_1/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007973166306813557,"min":-0.7415044665336609}},
49 | {"name":"conv64_2/conv1/conv/filters","shape":[3,3,64,64],"dtype":"float32"},
50 | {"name":"conv64_2/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.2535732661062331e-9,"min":-1.8302169685151004e-7}},
51 | {"name":"conv64_2/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005631206549850164,"min":2.9051668643951416}},
52 | {"name":"conv64_2/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01859012585060269,"min":-2.3795361088771445}},
53 | {"name":"conv64_2/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},
54 | {"name":"conv64_2/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.486726369919351e-9,"min":-3.5311514452854786e-7}},
55 | {"name":"conv64_2/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03740917467603497,"min":5.571568965911865}},
56 | {"name":"conv64_2/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006418555858088475,"min":-0.5263215803632549}},
57 | {"name":"conv64_3/conv1/conv/filters","shape":[3,3,64,64],"dtype":"float32"},
58 | {"name":"conv64_3/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":7.432564576875473e-10,"min":-8.47312361763804e-8}},
59 | {"name":"conv64_3/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006400122362024644,"min":2.268010377883911}},
60 | {"name":"conv64_3/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010945847922680425,"min":-1.3353934465670119}},
61 | {"name":"conv64_3/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},
62 | {"name":"conv64_3/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.278228722014533e-9,"min":-3.212302498040492e-7}},
63 | {"name":"conv64_3/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029840927498013366,"min":7.038398265838623}},
64 | {"name":"conv64_3/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010651412197187834,"min":-1.161003929493474}},
65 | {"name":"conv128_down/conv1/conv/filters","shape":[3,3,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00020040544662989823,"min":-0.022245004575918704}},
66 | {"name":"conv128_down/conv1/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.3550543563576545e-10,"min":-4.311503812794078e-8}},
67 | {"name":"conv128_down/conv1/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007448580685783835,"min":2.830846071243286}},
68 | {"name":"conv128_down/conv1/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01211262824488621,"min":-1.6957679542840696}},
69 | {"name":"conv128_down/conv2/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00022380277514457702,"min":-0.02484210804104805}},
70 | {"name":"conv128_down/conv2/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":9.031058637304466e-10,"min":-1.1650065642122761e-7}},
71 | {"name":"conv128_down/conv2/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.027663578706629135,"min":3.1111555099487305}},
72 | {"name":"conv128_down/conv2/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008878476946961646,"min":-1.029903325847551}},
73 | {"name":"conv128_1/conv1/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00022380667574265425,"min":-0.032899581334170175}},
74 | {"name":"conv128_1/conv1/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.4147297756478345e-10,"min":-5.253528433020923e-8}},
75 | {"name":"conv128_1/conv1/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013599334978589825,"min":3.634530782699585}},
76 | {"name":"conv128_1/conv1/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014059314073300829,"min":-1.4059314073300828}},
77 | {"name":"conv128_1/conv2/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00021715293474057143,"min":-0.02909849325523657}},
78 | {"name":"conv128_1/conv2/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":9.887046963276768e-10,"min":-1.1370104007768284e-7}},
79 | {"name":"conv128_1/conv2/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029993299409454943,"min":3.630716562271118}},
80 | {"name":"conv128_1/conv2/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00782704236460667,"min":-0.7200878975438136}},
81 | {"name":"conv128_2/conv1/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00017718105923895743,"min":-0.022324813464108636}},
82 | {"name":"conv128_2/conv1/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":3.567012027797675e-10,"min":-5.243507680862582e-8}},
83 | {"name":"conv128_2/conv1/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007940645778880399,"min":4.927767753601074}},
84 | {"name":"conv128_2/conv1/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015933452867994122,"min":-1.5614783810634238}},
85 | {"name":"conv128_2/conv2/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0001451439717236687,"min":-0.01712698866339291}},
86 | {"name":"conv128_2/conv2/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.0383988570966347e-9,"min":-1.2356946399449953e-7}},
87 | {"name":"conv128_2/conv2/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.02892604528688917,"min":4.750600814819336}},
88 | {"name":"conv128_2/conv2/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00797275748907351,"min":-0.7414664464838364}},
89 | {"name":"conv256_down/conv1/conv/filters","shape":[3,3,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0002698827827093648,"min":-0.03994265184098599}},
90 | {"name":"conv256_down/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":5.036909834755123e-10,"min":-6.396875490139006e-8}},
91 | {"name":"conv256_down/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014870181738161573,"min":4.269900798797607}},
92 | {"name":"conv256_down/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.022031106200872685,"min":-3.1063859743230484}},
93 | {"name":"conv256_down/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00046430734150549946,"min":-0.03946612402796745}},
94 | {"name":"conv256_down/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":6.693064577513153e-10,"min":-7.630093618364995e-8}},
95 | {"name":"conv256_down/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03475512242784687,"min":3.608360528945923}},
96 | {"name":"conv256_down/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01290142021927179,"min":-1.1482263995151893}},
97 | {"name":"conv256_1/conv1/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00037147209924810076,"min":-0.04234781931428348}},
98 | {"name":"conv256_1/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":3.2105515457510146e-10,"min":-3.467395669411096e-8}},
99 | {"name":"conv256_1/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.043242172166412955,"min":5.28542947769165}},
100 | {"name":"conv256_1/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01643658619300992,"min":-1.3149268954407936}},
101 | {"name":"conv256_1/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0003289232651392619,"min":-0.041773254672686264}},
102 | {"name":"conv256_1/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":9.13591691187321e-10,"min":-1.2333487831028833e-7}},
103 | {"name":"conv256_1/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0573908618852204,"min":4.360693454742432}},
104 | {"name":"conv256_1/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0164216583850337,"min":-1.3958409627278647}},
105 | {"name":"conv256_2/conv1/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00010476927912118389,"min":-0.015610622589056398}},
106 | {"name":"conv256_2/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.418552539068639e-10,"min":-2.539480166022071e-8}},
107 | {"name":"conv256_2/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.06024209564807368,"min":6.598613739013672}},
108 | {"name":"conv256_2/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01578534350675695,"min":-1.1049740454729864}},
109 | {"name":"conv256_2/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00005543030908002573,"min":-0.007427661416723448}},
110 | {"name":"conv256_2/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.0822061852320308e-9,"min":-1.515088659324843e-7}},
111 | {"name":"conv256_2/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04302893993901272,"min":2.2855491638183594}},
112 | {"name":"conv256_2/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006792667566561232,"min":-0.8083274404207865}},
113 | {"name":"conv256_down_out/conv1/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000568966465253456,"min":-0.05632768006009214}},
114 | {"name":"conv256_down_out/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.5347887884881677e-10,"min":-6.530095855422961e-8}},
115 | {"name":"conv256_down_out/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.017565592597512638,"min":4.594101905822754}},
116 | {"name":"conv256_down_out/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04850864223405427,"min":-6.306123490427055}},
117 | {"name":"conv256_down_out/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0003739110687199761,"min":-0.06954745878191555}},
118 | {"name":"conv256_down_out/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.2668428328152895e-9,"min":-2.2549802424112154e-7}},
119 | {"name":"conv256_down_out/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04351314469879749,"min":4.31956672668457}},
120 | {"name":"conv256_down_out/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021499746921015722,"min":-1.2039858275768804}},
121 | {"name":"fc","shape":[256,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000357687911566566,"min":-0.04578405268052045}}
122 | ],
123 | "paths":
124 | [
125 | "face_recognition_model.bin"
126 | ]
127 | }
128 | ]
--------------------------------------------------------------------------------
/public/face-api/models/face_recognition_model.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/face-api/models/face_recognition_model.bin
--------------------------------------------------------------------------------
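These manifest/.bin pairs are what face-api.js fetches at runtime: loadFromUri requests the per-net weights manifest (e.g. ssd_mobilenetv1_model-weights_manifest.json) and then the shard paths it lists. A sketch of how a client such as the FaceDetectionImage component would presumably load the nets shipped under /public/face-api/models; the wrapper function is an assumption, not code from this repo:

import * as faceapi from "face-api.js";

// Load every net whose manifest ships in /public/face-api/models.
export async function loadFaceApiModels(uri = "/face-api/models") {
	await Promise.all([
		faceapi.nets.ssdMobilenetv1.loadFromUri(uri),
		faceapi.nets.tinyFaceDetector.loadFromUri(uri),
		faceapi.nets.faceLandmark68Net.loadFromUri(uri),
		faceapi.nets.faceLandmark68TinyNet.loadFromUri(uri),
		faceapi.nets.faceRecognitionNet.loadFromUri(uri),
		faceapi.nets.ageGenderNet.loadFromUri(uri),
		faceapi.nets.faceExpressionNet.loadFromUri(uri),
	]);
}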
/public/face-api/models/ssd_mobilenetv1_model-weights_manifest.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "weights":
4 | [
5 | {"dtype":"float32","shape":[1,1,512,9],"quantization":{"scale":0.0026856216729856004,"min":-0.34107395246917127,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/ClassPredictor/weights"},
6 | {"dtype":"float32","shape":[9],"quantization":{"scale":0.00198518248165355,"min":-0.32159956202787515,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/ClassPredictor/biases"},
7 | {"dtype":"float32","shape":[1,1,1024,18],"quantization":{"scale":0.003060340296988394,"min":-0.489654447518143,"dtype":"uint8"},"name":"Prediction/BoxPredictor_1/ClassPredictor/weights"},
8 | {"dtype":"float32","shape":[18],"quantization":{"scale":0.0008040678851744708,"min":-0.12221831854651957,"dtype":"uint8"},"name":"Prediction/BoxPredictor_1/ClassPredictor/biases"},
9 | {"dtype":"float32","shape":[1,1,512,18],"quantization":{"scale":0.0012513800578958848,"min":-0.16017664741067325,"dtype":"uint8"},"name":"Prediction/BoxPredictor_2/ClassPredictor/weights"},
10 | {"dtype":"float32","shape":[18],"quantization":{"scale":0.000338070518245884,"min":-0.05510549447407909,"dtype":"uint8"},"name":"Prediction/BoxPredictor_2/ClassPredictor/biases"},
11 | {"dtype":"float32","shape":[1,1,256,18],"quantization":{"scale":0.0011819932975021064,"min":-0.1453851755927591,"dtype":"uint8"},"name":"Prediction/BoxPredictor_3/ClassPredictor/weights"},
12 | {"dtype":"float32","shape":[18],"quantization":{"scale":0.00015985782386041154,"min":-0.026536398760828316,"dtype":"uint8"},"name":"Prediction/BoxPredictor_3/ClassPredictor/biases"},
13 | {"dtype":"float32","shape":[1,1,256,18],"quantization":{"scale":0.0007035591438704846,"min":-0.08513065640832863,"dtype":"uint8"},"name":"Prediction/BoxPredictor_4/ClassPredictor/weights"},
14 | {"dtype":"float32","shape":[18],"quantization":{"scale":0.00008793946574716008,"min":-0.013190919862074012,"dtype":"uint8"},"name":"Prediction/BoxPredictor_4/ClassPredictor/biases"},
15 | {"dtype":"float32","shape":[1,1,128,18],"quantization":{"scale":0.00081320781918133,"min":-0.11059626340866088,"dtype":"uint8"},"name":"Prediction/BoxPredictor_5/ClassPredictor/weights"},
16 | {"dtype":"float32","shape":[18],"quantization":{"scale":0.0000980533805547976,"min":-0.014609953702664841,"dtype":"uint8"},"name":"Prediction/BoxPredictor_5/ClassPredictor/biases"},
17 | {"dtype":"int32","shape":[],"quantization":{"scale":1,"min":3,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/stack_1/2"},
18 | {"dtype":"int32","shape":[3],"quantization":{"scale":0.00392156862745098,"min":0,"dtype":"uint8"},"name":"Postprocessor/Slice/begin"},
19 | {"dtype":"int32","shape":[3],"quantization":{"scale":1,"min":-1,"dtype":"uint8"},"name":"Postprocessor/Slice/size"},
20 | {"dtype":"float32","shape":[1,1,512,12],"quantization":{"scale":0.003730384859384275,"min":-0.4327246436885759,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/BoxEncodingPredictor/weights"},
21 | {"dtype":"float32","shape":[12],"quantization":{"scale":0.0018744708568442102,"min":-0.3917644090804399,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/BoxEncodingPredictor/biases"},
22 | {"dtype":"int32","shape":[],"quantization":{"scale":1,"min":3072,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/stack_1/1"},
23 | {"dtype":"float32","shape":[1,1,1024,24],"quantization":{"scale":0.00157488017689948,"min":-0.20000978246623397,"dtype":"uint8"},"name":"Prediction/BoxPredictor_1/BoxEncodingPredictor/weights"},
24 | {"dtype":"float32","shape":[24],"quantization":{"scale":0.0002823906713256649,"min":-0.043488163384152394,"dtype":"uint8"},"name":"Prediction/BoxPredictor_1/BoxEncodingPredictor/biases"},
25 | {"dtype":"int32","shape":[],"quantization":{"scale":1,"min":1536,"dtype":"uint8"},"name":"Prediction/BoxPredictor_1/stack_1/1"},
26 | {"dtype":"float32","shape":[1,1,512,24],"quantization":{"scale":0.0007974451663447361,"min":-0.11004743295557358,"dtype":"uint8"},"name":"Prediction/BoxPredictor_2/BoxEncodingPredictor/weights"},
27 | {"dtype":"float32","shape":[24],"quantization":{"scale":0.0001350417988849621,"min":-0.02039131163162928,"dtype":"uint8"},"name":"Prediction/BoxPredictor_2/BoxEncodingPredictor/biases"},
28 | {"dtype":"int32","shape":[],"quantization":{"scale":1,"min":384,"dtype":"uint8"},"name":"Prediction/BoxPredictor_2/stack_1/1"},
29 | {"dtype":"float32","shape":[1,1,256,24],"quantization":{"scale":0.0007113990246080885,"min":-0.0860792819775787,"dtype":"uint8"},"name":"Prediction/BoxPredictor_3/BoxEncodingPredictor/weights"},
30 | {"dtype":"float32","shape":[24],"quantization":{"scale":0.000050115815418608046,"min":-0.007617603943628423,"dtype":"uint8"},"name":"Prediction/BoxPredictor_3/BoxEncodingPredictor/biases"},
31 | {"dtype":"int32","shape":[],"quantization":{"scale":1,"min":96,"dtype":"uint8"},"name":"Prediction/BoxPredictor_3/stack_1/1"},
32 | {"dtype":"float32","shape":[1,1,256,24],"quantization":{"scale":0.000590049314732645,"min":-0.06903576982371946,"dtype":"uint8"},"name":"Prediction/BoxPredictor_4/BoxEncodingPredictor/weights"},
33 | {"dtype":"float32","shape":[24],"quantization":{"scale":0.00003513663861097074,"min":-0.006359731588585704,"dtype":"uint8"},"name":"Prediction/BoxPredictor_4/BoxEncodingPredictor/biases"},
34 | {"dtype":"int32","shape":[],"quantization":{"scale":1,"min":24,"dtype":"uint8"},"name":"Prediction/BoxPredictor_4/stack_1/1"},
35 | {"dtype":"float32","shape":[1,1,128,24],"quantization":{"scale":0.0005990567744946948,"min":-0.07907549423329971,"dtype":"uint8"},"name":"Prediction/BoxPredictor_5/BoxEncodingPredictor/weights"},
36 | {"dtype":"float32","shape":[24],"quantization":{"scale":0.00003392884288640583,"min":-0.006039334033780238,"dtype":"uint8"},"name":"Prediction/BoxPredictor_5/BoxEncodingPredictor/biases"},
37 | {"dtype":"float32","shape":[],"quantization":{"scale":1,"min":0.007843137718737125,"dtype":"uint8"},"name":"Preprocessor/mul/x"},
38 | {"dtype":"int32","shape":[2],"quantization":{"scale":1,"min":512,"dtype":"uint8"},"name":"Preprocessor/ResizeImage/size"},
39 | {"dtype":"float32","shape":[],"quantization":{"scale":1,"min":1,"dtype":"uint8"},"name":"Preprocessor/sub/y"},
40 | {"dtype":"float32","shape":[3,3,3,32],"quantization":{"scale":0.03948551065781537,"min":-5.014659853542552,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_0_pointwise/weights"},
41 | {"dtype":"float32","shape":[32],"quantization":{"scale":0.0498106133704092,"min":-7.371970778820562,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_0_pointwise/convolution_bn_offset"},
42 | {"dtype":"float32","shape":[3,3,32,1],"quantization":{"scale":0.036833542468501075,"min":-4.714693435968138,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_depthwise/depthwise_weights"},
43 | {"dtype":"float32","shape":[32],"quantization":{"scale":0.012173276705046495,"min":-0.012173276705046495,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_depthwise/BatchNorm/gamma"},
44 | {"dtype":"float32","shape":[32],"quantization":{"scale":0.032182769214405736,"min":-2.4780732295092416,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_depthwise/BatchNorm/beta"},
45 | {"dtype":"float32","shape":[32],"quantization":{"scale":0.028287527607936486,"min":-3.366215785344442,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_depthwise/BatchNorm/moving_mean"},
46 | {"dtype":"float32","shape":[32],"quantization":{"scale":0.04716738532571232,"min":3.9071404665769224e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_depthwise/BatchNorm/moving_variance"},
47 | {"dtype":"float32","shape":[1,1,32,64],"quantization":{"scale":0.04010109433940812,"min":-4.290817094316669,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_pointwise/weights"},
48 | {"dtype":"float32","shape":[64],"quantization":{"scale":0.2212210038129021,"min":-34.51047659481273,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_pointwise/convolution_bn_offset"},
49 | {"dtype":"float32","shape":[3,3,64,1],"quantization":{"scale":0.010024750933927648,"min":-1.343316625146305,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_depthwise/depthwise_weights"},
50 | {"dtype":"float32","shape":[64],"quantization":{"scale":0.006120916675118839,"min":0.5227176547050476,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_depthwise/BatchNorm/gamma"},
51 | {"dtype":"float32","shape":[64],"quantization":{"scale":0.02317035385206634,"min":-0.7646216771181892,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_depthwise/BatchNorm/beta"},
52 | {"dtype":"float32","shape":[64],"quantization":{"scale":0.04980821422502106,"min":-5.8275610643274645,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_depthwise/BatchNorm/moving_mean"},
53 | {"dtype":"float32","shape":[64],"quantization":{"scale":0.051751047022202436,"min":3.916113799002297e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_depthwise/BatchNorm/moving_variance"},
54 | {"dtype":"float32","shape":[1,1,64,128],"quantization":{"scale":0.021979344124887504,"min":-2.1319963801140878,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_pointwise/weights"},
55 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.09958663267247816,"min":-11.054116226645077,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_pointwise/convolution_bn_offset"},
56 | {"dtype":"float32","shape":[3,3,128,1],"quantization":{"scale":0.01943492702409333,"min":-2.6237151482525993,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_depthwise/depthwise_weights"},
57 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.017852897737540452,"min":0.40204083919525146,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_depthwise/BatchNorm/gamma"},
58 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.029888209174661076,"min":-1.972621805527631,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_depthwise/BatchNorm/beta"},
59 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.029319268581913967,"min":-5.130872001834945,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_depthwise/BatchNorm/moving_mean"},
60 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.014018708584355373,"min":3.9083178263362604e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_depthwise/BatchNorm/moving_variance"},
61 | {"dtype":"float32","shape":[1,1,128,128],"quantization":{"scale":0.020776657964669022,"min":-2.5347522716896207,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_pointwise/weights"},
62 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.14383157094319662,"min":-9.636715253194174,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_pointwise/convolution_bn_offset"},
63 | {"dtype":"float32","shape":[3,3,128,1],"quantization":{"scale":0.004463558571011412,"min":-0.5981168485155293,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_depthwise/depthwise_weights"},
64 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.006487431245691636,"min":0.47910428047180176,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_depthwise/BatchNorm/gamma"},
65 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.026542164297664865,"min":-1.2209395576925839,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_depthwise/BatchNorm/beta"},
66 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.05119945675719018,"min":-8.60150873520795,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_depthwise/BatchNorm/moving_mean"},
67 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.03081628388049556,"min":3.911508751095344e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_depthwise/BatchNorm/moving_variance"},
68 | {"dtype":"float32","shape":[1,1,128,256],"quantization":{"scale":0.010758659886378868,"min":-1.0328313490923713,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_pointwise/weights"},
69 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.08058219610476026,"min":-9.34753474815219,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_pointwise/convolution_bn_offset"},
70 | {"dtype":"float32","shape":[3,3,256,1],"quantization":{"scale":0.01145936741548426,"min":-1.3292866201961742,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_depthwise/depthwise_weights"},
71 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.0083988838336047,"min":0.36280909180641174,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_depthwise/BatchNorm/gamma"},
72 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.02858148649627087,"min":-3.6584302715226715,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_depthwise/BatchNorm/beta"},
73 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.03988401375564874,"min":-7.099354448505476,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_depthwise/BatchNorm/moving_mean"},
74 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.009090481683904049,"min":0.020878996700048447,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_depthwise/BatchNorm/moving_variance"},
75 | {"dtype":"float32","shape":[1,1,256,256],"quantization":{"scale":0.008951201625898773,"min":-1.1189002032373465,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_pointwise/weights"},
76 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.051758006974762565,"min":-5.745138774198645,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_pointwise/convolution_bn_offset"},
77 | {"dtype":"float32","shape":[3,3,256,1],"quantization":{"scale":0.004110433190476661,"min":-0.6042336790000691,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_depthwise/depthwise_weights"},
78 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.013170199768216002,"min":0.3386639356613159,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_depthwise/BatchNorm/gamma"},
79 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.03599378548416437,"min":-3.70735990486893,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_depthwise/BatchNorm/beta"},
80 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.026967673208199296,"min":-3.748506575939702,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_depthwise/BatchNorm/moving_mean"},
81 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.012615410486857097,"min":3.9111388979838637e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_depthwise/BatchNorm/moving_variance"},
82 | {"dtype":"float32","shape":[1,1,256,512],"quantization":{"scale":0.00822840648538926,"min":-1.1848905338960536,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_pointwise/weights"},
83 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.06608965817619772,"min":-7.468131373910342,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_pointwise/convolution_bn_offset"},
84 | {"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.008801074355256323,"min":-0.9593171047229393,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_depthwise/depthwise_weights"},
85 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.030577416513480393,"min":0.3285980224609375,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_depthwise/BatchNorm/gamma"},
86 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.04778536441279393,"min":-8.935863145192464,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_depthwise/BatchNorm/beta"},
87 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.04331884945140165,"min":-9.660103427662568,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_depthwise/BatchNorm/moving_mean"},
88 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.04126455444367785,"min":0.000604183878749609,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_depthwise/BatchNorm/moving_variance"},
89 | {"dtype":"float32","shape":[1,1,512,512],"quantization":{"scale":0.009305818408143287,"min":-1.1446156642016243,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_pointwise/weights"},
90 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.04640720217835669,"min":-4.733534622192383,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_pointwise/convolution_bn_offset"},
91 | {"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.008138792655047248,"min":-0.9766551186056698,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_depthwise/depthwise_weights"},
92 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.027351748358969596,"min":0.34030041098594666,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_depthwise/BatchNorm/gamma"},
93 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.04415061053107767,"min":-7.019947074441349,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_depthwise/BatchNorm/beta"},
94 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.02476683784933651,"min":-2.9224868662217083,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_depthwise/BatchNorm/moving_mean"},
95 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.02547598832684076,"min":0.00026032101595774293,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_depthwise/BatchNorm/moving_variance"},
96 | {"dtype":"float32","shape":[1,1,512,512],"quantization":{"scale":0.01083052625843123,"min":-1.2563410459780227,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_pointwise/weights"},
97 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.06360894371481503,"min":-7.951117964351878,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_pointwise/convolution_bn_offset"},
98 | {"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.006704086883395326,"min":-0.8648272079579971,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_depthwise/depthwise_weights"},
99 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.015343831567203297,"min":0.2711026668548584,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_depthwise/BatchNorm/gamma"},
100 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.03378283930759804,"min":-4.797163181678922,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_depthwise/BatchNorm/beta"},
101 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.021910778213949763,"min":-3.987761634938857,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_depthwise/BatchNorm/moving_mean"},
102 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.009284070410007296,"min":0.000021581046894425526,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_depthwise/BatchNorm/moving_variance"},
103 | {"dtype":"float32","shape":[1,1,512,512],"quantization":{"scale":0.012783036979974485,"min":-1.9046725100161983,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_pointwise/weights"},
104 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.07273082733154297,"min":-9.52773838043213,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_pointwise/convolution_bn_offset"},
105 | {"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.006126228033327589,"min":-0.7351473639993107,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_depthwise/depthwise_weights"},
106 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.029703759212119908,"min":0.28687000274658203,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_depthwise/BatchNorm/gamma"},
107 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.04394429898729511,"min":-6.3279790541704966,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_depthwise/BatchNorm/beta"},
108 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.016566915605582443,"min":-2.7501079905266854,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_depthwise/BatchNorm/moving_mean"},
109 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.012152872833551145,"min":3.913338286370366e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_depthwise/BatchNorm/moving_variance"},
110 | {"dtype":"float32","shape":[1,1,512,512],"quantization":{"scale":0.01354524388032801,"min":-1.7473364605623134,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_pointwise/weights"},
111 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.08566816367355047,"min":-9.937506986131854,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_pointwise/convolution_bn_offset"},
112 | {"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.006012305558896532,"min":-0.7876120282154457,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_depthwise/depthwise_weights"},
113 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.01469323155926723,"min":0.29223933815956116,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_depthwise/BatchNorm/gamma"},
114 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.030889174517463234,"min":-3.2433633243336395,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_depthwise/BatchNorm/beta"},
115 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.014836942448335536,"min":-2.047498057870304,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_depthwise/BatchNorm/moving_mean"},
116 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.007234466105343445,"min":0.00013165915152058005,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_depthwise/BatchNorm/moving_variance"},
117 | {"dtype":"float32","shape":[1,1,512,512],"quantization":{"scale":0.016261722527298274,"min":-1.4798167499841428,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_pointwise/weights"},
118 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.091437328563017,"min":-14.172785927267636,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_pointwise/convolution_bn_offset"},
119 | {"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.004750356487199372,"min":-0.650798838746314,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_depthwise/depthwise_weights"},
120 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.008174965545242907,"min":0.3120670020580292,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_depthwise/BatchNorm/gamma"},
121 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.030133422215779623,"min":-2.41067377726237,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_depthwise/BatchNorm/beta"},
122 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.006088157261119169,"min":-0.7853722866843729,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_depthwise/BatchNorm/moving_mean"},
123 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.003668997334498985,"min":3.9124486300013356e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_depthwise/BatchNorm/moving_variance"},
124 | {"dtype":"float32","shape":[1,1,512,1024],"quantization":{"scale":0.010959514449624454,"min":-1.4028178495519301,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_pointwise/weights"},
125 | {"dtype":"float32","shape":[1024],"quantization":{"scale":0.10896045834410424,"min":-14.818622334798176,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_pointwise/convolution_bn_offset"},
126 | {"dtype":"float32","shape":[3,3,1024,1],"quantization":{"scale":0.004633033509347953,"min":-0.5652300881404502,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_depthwise/depthwise_weights"},
127 | {"dtype":"float32","shape":[1024],"quantization":{"scale":0.022285057224479377,"min":0.23505790531635284,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_depthwise/BatchNorm/gamma"},
128 | {"dtype":"float32","shape":[1024],"quantization":{"scale":0.0324854850769043,"min":-3.9957146644592285,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_depthwise/BatchNorm/beta"},
129 | {"dtype":"float32","shape":[1024],"quantization":{"scale":0.014760061806323482,"min":-2.125448900110581,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_depthwise/BatchNorm/moving_mean"},
130 | {"dtype":"float32","shape":[1024],"quantization":{"scale":0.0036057423142825855,"min":3.9067056828997994e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_depthwise/BatchNorm/moving_variance"},
131 | {"dtype":"float32","shape":[1,1,1024,1024],"quantization":{"scale":0.017311988157384536,"min":-2.094750567043529,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_pointwise/weights"},
132 | {"dtype":"float32","shape":[1024],"quantization":{"scale":0.16447528764313343,"min":-25.658144872328815,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_pointwise/convolution_bn_offset"},
133 | {"dtype":"float32","shape":[1,1,1024,256],"quantization":{"scale":0.0026493051472832175,"min":-0.36825341547236723,"dtype":"uint8"},"name":"Prediction/Conv2d_0_pointwise/weights"},
134 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.012474596734140433,"min":-2.3078003958159803,"dtype":"uint8"},"name":"Prediction/Conv2d_0_pointwise/convolution_bn_offset"},
135 | {"dtype":"float32","shape":[3,3,256,512],"quantization":{"scale":0.014533351449405445,"min":-1.8166689311756807,"dtype":"uint8"},"name":"Prediction/Conv2d_1_pointwise/weights"},
136 | {"dtype":"float32","shape":[512],"quantization":{"scale":0.024268776762719248,"min":-2.4754152297973633,"dtype":"uint8"},"name":"Prediction/Conv2d_1_pointwise/convolution_bn_offset"},
137 | {"dtype":"float32","shape":[1,1,512,128],"quantization":{"scale":0.002208403746287028,"min":-0.28709248701731366,"dtype":"uint8"},"name":"Prediction/Conv2d_2_pointwise/weights"},
138 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.012451349052728392,"min":-1.5937726787492341,"dtype":"uint8"},"name":"Prediction/Conv2d_2_pointwise/convolution_bn_offset"},
139 | {"dtype":"float32","shape":[3,3,128,256],"quantization":{"scale":0.026334229637594783,"min":-2.8967652601354263,"dtype":"uint8"},"name":"Prediction/Conv2d_3_pointwise/weights"},
140 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.02509917792151956,"min":-1.4055539636050953,"dtype":"uint8"},"name":"Prediction/Conv2d_3_pointwise/convolution_bn_offset"},
141 | {"dtype":"float32","shape":[1,1,256,128],"quantization":{"scale":0.004565340046789132,"min":-0.3971845840706545,"dtype":"uint8"},"name":"Prediction/Conv2d_4_pointwise/weights"},
142 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.017302456556581983,"min":-2.5953684834872974,"dtype":"uint8"},"name":"Prediction/Conv2d_4_pointwise/convolution_bn_offset"},
143 | {"dtype":"float32","shape":[3,3,128,256],"quantization":{"scale":0.025347338470758176,"min":-3.8527954475552426,"dtype":"uint8"},"name":"Prediction/Conv2d_5_pointwise/weights"},
144 | {"dtype":"float32","shape":[256],"quantization":{"scale":0.033134659598855414,"min":-2.9158500446992766,"dtype":"uint8"},"name":"Prediction/Conv2d_5_pointwise/convolution_bn_offset"},
145 | {"dtype":"float32","shape":[1,1,256,64],"quantization":{"scale":0.002493104397081861,"min":-0.2817207968702503,"dtype":"uint8"},"name":"Prediction/Conv2d_6_pointwise/weights"},
146 | {"dtype":"float32","shape":[64],"quantization":{"scale":0.011383360974928912,"min":-1.2749364291920382,"dtype":"uint8"},"name":"Prediction/Conv2d_6_pointwise/convolution_bn_offset"},
147 | {"dtype":"float32","shape":[3,3,64,128],"quantization":{"scale":0.020821522731407017,"min":-2.7484410005457263,"dtype":"uint8"},"name":"Prediction/Conv2d_7_pointwise/weights"},
148 | {"dtype":"float32","shape":[128],"quantization":{"scale":0.052144218893612135,"min":-3.5979511036592373,"dtype":"uint8"},"name":"Prediction/Conv2d_7_pointwise/convolution_bn_offset"},
149 | {"dtype":"int32","shape":[],"quantization":{"scale":1,"min":6,"dtype":"uint8"},"name":"Prediction/BoxPredictor_5/stack_1/1"},
150 | {"dtype":"int32","shape":[],"quantization":{"scale":1,"min":1,"dtype":"uint8"},"name":"concat_1/axis"},
151 | {"dtype":"int32","shape":[1],"quantization":{"scale":1,"min":0,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/strided_slice/stack"},
152 | {"dtype":"int32","shape":[1],"quantization":{"scale":1,"min":1,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/strided_slice/stack_1"},
153 | {"dtype":"int32","shape":[],"quantization":{"scale":1,"min":5118,"dtype":"uint8"},"name":"Postprocessor/stack/1"},
154 | {"dtype":"int32","shape":[],"quantization":{"scale":1,"min":4,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/stack/3"},
155 | {"dtype":"float32","shape":[1,5118,4],"name":"Output/extra_dim"}
156 | ],
157 | "paths":
158 | [
159 | "ssd_mobilenetv1_model.bin"
160 | ]
161 | }
162 | ]
--------------------------------------------------------------------------------
/public/face-api/models/ssd_mobilenetv1_model.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/face-api/models/ssd_mobilenetv1_model.bin
--------------------------------------------------------------------------------
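
Note: the manifest above is a standard TensorFlow.js weights manifest — each entry describes one uint8-quantized tensor (shape, plus the `scale`/`min` needed to dequantize it) and `paths` names the binary shard (`ssd_mobilenetv1_model.bin`) that holds the raw bytes. face-api.js consumes the pair through its net loaders; since Next.js serves `public/` at the site root, the models are reachable at `/face-api/models`. A minimal sketch of loading this SSD MobileNetV1 detector in the browser — the confidence threshold is illustrative, not the app's actual setting:

```ts
import * as faceapi from "face-api.js";

async function detectFace(img: HTMLImageElement) {
	// Fetches ssd_mobilenetv1_model-weights_manifest.json from the given URI,
	// then the .bin shard listed under "paths", dequantizing the uint8
	// tensors back to float32 using each entry's scale/min.
	await faceapi.nets.ssdMobilenetv1.loadFromUri("/face-api/models");

	// minConfidence 0.5 is an illustrative threshold.
	const detection = await faceapi.detectSingleFace(
		img,
		new faceapi.SsdMobilenetv1Options({ minConfidence: 0.5 })
	);

	if (detection) {
		const { x, y, width, height } = detection.box;
		console.log("face box:", x, y, width, height);
	}
}
```
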
/public/face-api/models/tiny_face_detector_model-weights_manifest.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "weights":
4 | [
5 | {"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009007044399485869,"min":-1.2069439495311063}},
6 | {"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005263455241334205,"min":-0.9211046672334858}},
7 | {"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004001977630690033,"min":-0.5042491814669441}},
8 | {"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013836609615999109,"min":-1.411334180831909}},
9 | {"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015159862590771096,"min":-0.30926119685173037}},
10 | {"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002666276225856706,"min":-0.317286870876948}},
11 | {"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015265831292844286,"min":-1.6792414422128714}},
12 | {"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0020280554598453,"min":-0.37113414915168985}},
13 | {"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006100742489683862,"min":-0.8907084034938438}},
14 | {"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016276211832083907,"min":-2.0508026908425725}},
15 | {"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394414279975143,"min":-0.7637432129944072}},
16 | {"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006716050119961009,"min":-0.8059260143953211}},
17 | {"name":"conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021875603993733724,"min":-2.8875797271728514}},
18 | {"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0041141652009066415,"min":-0.8187188749804216}},
19 | {"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008423839597141042,"min":-0.9013508368940915}},
20 | {"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.030007277283014035,"min":-3.8709387695088107}},
21 | {"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008402082966823203,"min":-1.4871686851277068}},
22 | {"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.028336129469030042,"min":-4.675461362389957}},
23 | {"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002268134028303857,"min":-0.41053225912299807}}
24 | ],
25 | "paths":
26 | [
27 | "tiny_face_detector_model.bin"
28 | ]
29 | }
30 | ]
--------------------------------------------------------------------------------
/public/face-api/models/tiny_face_detector_model.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/face-api/models/tiny_face_detector_model.bin
--------------------------------------------------------------------------------
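
The tiny face detector is the lightweight alternative to the SSD net above — same manifest/shard format, far fewer weights, faster but less accurate. A sketch of multi-face detection with it (`inputSize` and `scoreThreshold` are illustrative values; `inputSize` must be a multiple of 32):

```ts
import * as faceapi from "face-api.js";

async function countFaces(img: HTMLImageElement) {
	// Same loading scheme as the SSD model, different net.
	await faceapi.nets.tinyFaceDetector.loadFromUri("/face-api/models");

	// A larger inputSize detects smaller faces at higher cost.
	const detections = await faceapi.detectAllFaces(
		img,
		new faceapi.TinyFaceDetectorOptions({ inputSize: 416, scoreThreshold: 0.5 })
	);
	return detections.length;
}
```
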
/public/icons/android-chrome-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/android-chrome-192x192.png
--------------------------------------------------------------------------------
/public/icons/android-chrome-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/android-chrome-512x512.png
--------------------------------------------------------------------------------
/public/icons/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/apple-touch-icon.png
--------------------------------------------------------------------------------
/public/icons/browserconfig.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <browserconfig>
3 | 	<msapplication>
4 | 		<tile>
5 | 			<square150x150logo src="/icons/mstile-150x150.png"/>
6 | 			<TileColor>#223431</TileColor>
7 | 		</tile>
8 | 	</msapplication>
9 | </browserconfig>
10 |
--------------------------------------------------------------------------------
/public/icons/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/favicon-16x16.png
--------------------------------------------------------------------------------
/public/icons/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/favicon-32x32.png
--------------------------------------------------------------------------------
/public/icons/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/favicon.ico
--------------------------------------------------------------------------------
/public/icons/mstile-144x144.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/mstile-144x144.png
--------------------------------------------------------------------------------
/public/icons/mstile-150x150.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/mstile-150x150.png
--------------------------------------------------------------------------------
/public/icons/mstile-310x150.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/mstile-310x150.png
--------------------------------------------------------------------------------
/public/icons/mstile-310x310.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/mstile-310x310.png
--------------------------------------------------------------------------------
/public/icons/mstile-70x70.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/icons/mstile-70x70.png
--------------------------------------------------------------------------------
/public/images/anamnesis33/example (1).jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/images/anamnesis33/example (1).jpg
--------------------------------------------------------------------------------
/public/images/anamnesis33/example (2).jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/images/anamnesis33/example (2).jpg
--------------------------------------------------------------------------------
/public/images/anamnesis33/example (3).jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/images/anamnesis33/example (3).jpg
--------------------------------------------------------------------------------
/public/images/anamnesis33/example (4).jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/public/images/anamnesis33/example (4).jpg
--------------------------------------------------------------------------------
/training/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/training/.gitkeep
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es5",
4 | "lib": ["dom", "dom.iterable", "esnext"],
5 | "allowJs": true,
6 | "skipLibCheck": true,
7 | "strict": true,
8 | "forceConsistentCasingInFileNames": true,
9 | "noEmit": true,
10 | "esModuleInterop": true,
11 | "module": "esnext",
12 | "moduleResolution": "bundler",
13 | "resolveJsonModule": true,
14 | "isolatedModules": true,
15 | "jsx": "preserve",
16 | "incremental": true,
17 | "paths": {
18 | "@/*": ["./client/*"],
19 | "~/*": ["./public/*"]
20 | }
21 | },
22 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
23 | "exclude": ["node_modules"]
24 | }
25 |
--------------------------------------------------------------------------------
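
For reference, the `paths` block in the tsconfig above maps `@/*` to `client/` and `~/*` to `public/`, so modules can import app code without relative-path chains. A hypothetical usage — the imported names and export styles are assumptions, not checked against the actual files:

```ts
// Resolves to client/components/Header.tsx via the "@/*" alias.
import Header from "@/components/Header";
// Resolves to client/types.ts; the type name here is illustrative.
import type { Project } from "@/types";
```
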