├── .editorconfig ├── .env.local.example ├── .eslintrc.json ├── .github ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE │ ├── BUG_REPORT.md │ └── FEATURE_REQUEST.md ├── PULL_REQUEST_TEMPLATE.md ├── SECURITY.md └── workflows │ ├── build_pr_ubuntu.yml │ └── build_pr_win.yml ├── .gitignore ├── .idea ├── .gitignore ├── lorado.iml └── modules.xml ├── .node-version ├── .nvmrc ├── .prettierignore ├── .prettierrc.json ├── CHANGELOG.md ├── LICENSE ├── README.md ├── assets ├── crop.png ├── folder.png ├── generations │ ├── example-1.png │ ├── example-10.png │ ├── example-11.png │ ├── example-2.png │ ├── example-3.png │ ├── example-4.png │ ├── example-5.png │ ├── example-6.png │ ├── example-7.png │ ├── example-8.png │ └── example-9.png ├── projects.png ├── slideshow.png └── ui.png ├── client ├── components │ ├── FaceDetectionImage.tsx │ ├── FileUpload.tsx │ ├── Header.tsx │ ├── ImageItem.tsx │ ├── ImageWithErrorHandling.tsx │ ├── Layout.tsx │ ├── Masonry.tsx │ ├── ProjectsTable.tsx │ ├── SlideshowModal.tsx │ └── SynchedSliderInput.tsx ├── data │ ├── exampleImages.ts │ └── kohyaConfig.ts ├── services │ └── prepare │ │ ├── crop.ts │ │ ├── prepare.ts │ │ ├── reg.ts │ │ ├── sizes.ts │ │ └── utils.ts ├── types.ts └── utils │ ├── samples.ts │ └── traverseFileTree.ts ├── next.config.js ├── package-lock.json ├── package.json ├── pages ├── _app.tsx ├── _document.tsx ├── api │ ├── image │ │ ├── delete │ │ │ └── index.ts │ │ ├── edit.ts │ │ └── upload.ts │ ├── prepare.ts │ ├── projects │ │ └── index.ts │ └── uploads │ │ └── [...args].ts ├── index.tsx ├── projects │ └── [id].tsx └── test │ └── face-detect.tsx ├── public ├── face-api │ └── models │ │ ├── age_gender_model-weights_manifest.json │ │ ├── age_gender_model.bin │ │ ├── face_expression_model-weights_manifest.json │ │ ├── face_expression_model.bin │ │ ├── face_landmark_68_model-weights_manifest.json │ │ ├── face_landmark_68_model.bin │ │ ├── face_landmark_68_tiny_model-weights_manifest.json │ │ ├── face_landmark_68_tiny_model.bin │ │ ├── face_recognition_model-weights_manifest.json │ │ ├── face_recognition_model.bin │ │ ├── ssd_mobilenetv1_model-weights_manifest.json │ │ ├── ssd_mobilenetv1_model.bin │ │ ├── tiny_face_detector_model-weights_manifest.json │ │ └── tiny_face_detector_model.bin ├── icons │ ├── android-chrome-192x192.png │ ├── android-chrome-512x512.png │ ├── apple-touch-icon.png │ ├── browserconfig.xml │ ├── favicon-16x16.png │ ├── favicon-32x32.png │ ├── favicon.ico │ ├── mstile-144x144.png │ ├── mstile-150x150.png │ ├── mstile-310x150.png │ ├── mstile-310x310.png │ └── mstile-70x70.png └── images │ └── anamnesis33 │ ├── example (1).jpg │ ├── example (2).jpg │ ├── example (3).jpg │ └── example (4).jpg ├── training └── .gitkeep └── tsconfig.json /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | indent_style = tab 6 | indent_size = 4 7 | max_line_length = 100 8 | end_of_line = lf 9 | trim_trailing_whitespace = true 10 | insert_final_newline = true 11 | 12 | # trailing spaces in markdown indicate word wrap 13 | [*.md] 14 | trim_trailing_whitespace = false 15 | 16 | 17 | [{*.json,*.md,*.yml,.*rc,.*config}] 18 | indent_style = space 19 | 20 | [{*.json,*.yml,.*rc,.*config}] 21 | indent_size = 2 22 | -------------------------------------------------------------------------------- /.env.local.example: -------------------------------------------------------------------------------- 1 | UNSPLASH_ACCESS_KEY= 2 | 
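Note: `.env.local.example` lists the environment variable the app expects. A minimal setup sketch — assuming the value is read server-side via `process.env.UNSPLASH_ACCESS_KEY` (standard Next.js handling of `.env.local`) and is presumably only needed for the Unsplash-sourced example images:

```bash
# Copy the template and fill in your own key; .env*.local files are git-ignored (see .gitignore).
cp .env.local.example .env.local
# Then edit .env.local, e.g.:
# UNSPLASH_ACCESS_KEY=your-access-key   # placeholder, not a real key
```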
-------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "root": true, 3 | "parser": "@typescript-eslint/parser", 4 | "extends": ["typescript", "eslint:recommended", "xo", "plugin:prettier/recommended"], 5 | "env": { 6 | "browser": true, 7 | "es2022": true, 8 | "jest": true, 9 | "node": true 10 | }, 11 | "globals": { 12 | "BufferEncoding": "readonly" 13 | }, 14 | "plugins": ["@typescript-eslint", "unicorn", "unused-imports", "import", "jest", "prettier"], 15 | "ignorePatterns": ["*.d.ts"], 16 | "rules": { 17 | "import/order": [ 18 | "error", 19 | { 20 | "alphabetize": { 21 | "order": "asc", 22 | "caseInsensitive": true 23 | }, 24 | "newlines-between": "always" 25 | } 26 | ], 27 | "@typescript-eslint/no-explicit-any": "warn", 28 | "no-unused-vars": "off", 29 | "@typescript-eslint/no-unused-vars": "off", 30 | "unused-imports/no-unused-imports": "error", 31 | "unused-imports/no-unused-vars": [ 32 | "warn", 33 | { "vars": "all", "varsIgnorePattern": "^_", "args": "after-used", "argsIgnorePattern": "^_" } 34 | ], 35 | "curly": "error", 36 | "no-nested-ternary": "error", 37 | "no-multiple-empty-lines": "error", 38 | "object-curly-spacing": ["error", "always"], 39 | "prettier/prettier": "error", 40 | "arrow-body-style": ["error", "as-needed"], 41 | "@typescript-eslint/consistent-type-imports": "warn", 42 | "no-await-in-loop": "warn" 43 | }, 44 | "overrides": [ 45 | { 46 | "files": "types.ts", 47 | "rules": { 48 | "no-unused-vars": "off" 49 | } 50 | } 51 | ] 52 | } 53 | -------------------------------------------------------------------------------- /.github/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | . 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. 
Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 129 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | When contributing to this repository, please first discuss the change you wish to make via issue, 4 | email, or any other method with the owners of this repository before making a change. 5 | 6 | Please note we have a code of conduct, please follow it in all your interactions with the project. 7 | 8 | ## Pull Request Process 9 | 10 | Ensure any install or build dependencies are removed before the end of the layer when doing a build. 11 | Fork the repository and create a new branch (feature/my-feature) Commit changes following the 12 | "conventional-changelog" rules. Do not modify any versions manually. Don't build new versions. Use 13 | the PULL_REQUEST_TEMPLATE 14 | 15 | ## Reporting issues 16 | 17 | Ensure any install or build dependencies are removed before the end of the layer when doing a build. 18 | Create a new issue (bug/some-bug) Always list "yarn version", "node version" Use the ISSUE_TEMPLATE 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/BUG_REPORT.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: "[BUG]: …" 5 | labels: "" 6 | assignees: "" 7 | --- 8 | 9 | **Describe the bug** 10 | 11 | 12 | 13 | **Expected behavior** 14 | 15 | 16 | **Screenshots** 17 | 18 | 19 | **Versions (please complete the following information):** 20 | 21 | - Browser: [e.g. Chrome 115.0.5790.171] 22 | - Node.js: [e.g. v18.14.2] 23 | - OS: [e.g. windows 11] 24 | - LoRAdo [e.g. 
0.1.0] 25 | 26 | **Additional context** 27 | 28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: "" 5 | labels: "" 6 | assignees: "" 7 | --- 8 | 9 | **Is your feature request related to a problem? Please describe.** A clear and concise description 10 | of what the problem is. Ex. I'm always frustrated when [...] 11 | 12 | **Describe the solution you'd like** A clear and concise description of what you want to happen. 13 | 14 | **Describe alternatives you've considered** A clear and concise description of any alternative 15 | solutions or features you've considered. 16 | 17 | **Additional context** Add any other context or screenshots about the feature request here. 18 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Motivation 2 | 3 | 4 | 5 | ## Issues closed 6 | 7 | 8 | -------------------------------------------------------------------------------- /.github/SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | Use this section to tell people about which versions of your project are currently being supported 6 | with security updates. 7 | 8 | | Version | Supported | 9 | |---------| ------------------ | 10 | | 0.x.x | :white_check_mark: | 11 | 12 | ## Reporting a Vulnerability 13 | 14 | Use this section to tell people how to report a vulnerability. 15 | 16 | Tell them where to go, how often they can expect to get an update on a reported vulnerability, what 17 | to expect if the vulnerability is accepted or declined, etc. 
18 | -------------------------------------------------------------------------------- /.github/workflows/build_pr_ubuntu.yml: -------------------------------------------------------------------------------- 1 | name: Build Branch on Pull Request ubuntu 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened] 6 | branches: 7 | - "**" 8 | push: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | build-unix: 14 | name: Run Build Ubuntu 15 | runs-on: ubuntu-20.04 16 | 17 | steps: 18 | - uses: actions/checkout@v3 19 | 20 | - uses: n1hility/cancel-previous-runs@v2 21 | with: 22 | token: ${{ secrets.GITHUB_TOKEN }} 23 | 24 | - uses: actions/setup-node@v3 25 | with: 26 | node-version-file: '.nvmrc' 27 | 28 | - name: Get npm cache directory path 29 | id: npm-cache-dir-path 30 | run: echo "::set-output name=dir::$(npm config get cache)" 31 | 32 | - uses: actions/cache@v3 33 | id: cache 34 | with: 35 | path: | 36 | ${{ steps.npm-cache-dir-path.outputs.dir }} 37 | key: ${{ runner.os }}-${{ hashFiles('**/package-lock.json') }} 38 | 39 | - run: npm ci 40 | 41 | - run: npm run build 42 | -------------------------------------------------------------------------------- /.github/workflows/build_pr_win.yml: -------------------------------------------------------------------------------- 1 | name: Build Branch on Pull Request Win 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened] 6 | branches: 7 | - "**" 8 | push: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | build-win: 14 | name: Run Build Win 15 | runs-on: windows-2022 16 | 17 | steps: 18 | - uses: actions/checkout@v3 19 | 20 | - uses: n1hility/cancel-previous-runs@v2 21 | with: 22 | token: ${{ secrets.GITHUB_TOKEN }} 23 | 24 | - uses: actions/setup-node@v3 25 | with: 26 | node-version-file: '.nvmrc' 27 | 28 | - name: Get npm cache directory path 29 | id: npm-cache-dir-path 30 | run: echo "::set-output name=dir::$(npm config get cache)" 31 | 32 | - uses: actions/cache@v3 33 | id: cache 34 | with: 35 | path: | 36 | ${{ steps.npm-cache-dir-path.outputs.dir }} 37 | key: ${{ runner.os }}-${{ hashFiles('**/package-lock.json') }} 38 | 39 | - run: npm ci 40 | 41 | - run: npm run build 42 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
2 | 3 | /venv/ 4 | /data/ 5 | __pycache__/ 6 | /weights/ 7 | /wandb/ 8 | 9 | test_generation.ipynb 10 | # dependencies 11 | /node_modules 12 | /.pnp 13 | .pnp.js 14 | 15 | # testing 16 | /coverage 17 | 18 | # next.js 19 | /.next/ 20 | /out/ 21 | 22 | # production 23 | /build 24 | 25 | # misc 26 | .DS_Store 27 | *.pem 28 | 29 | # debug 30 | npm-debug.log* 31 | yarn-debug.log* 32 | yarn-error.log* 33 | 34 | # local env files 35 | .env*.local 36 | 37 | # vercel 38 | .vercel 39 | 40 | # typescript 41 | *.tsbuildinfo 42 | next-env.d.ts 43 | 44 | # intellij 45 | .idea 46 | 47 | # vscode 48 | .vscode/ 49 | 50 | # training data 51 | /training/* 52 | !/training/.gitkeep 53 | 54 | # models 55 | /models/*/* 56 | !/models/*/.gitkeep 57 | -------------------------------------------------------------------------------- /.idea/.gitignore: -------------------------------------------------------------------------------- 1 | # Default ignored files 2 | /shelf/ 3 | /workspace.xml 4 | # Editor-based HTTP Client requests 5 | /httpRequests/ 6 | -------------------------------------------------------------------------------- /.idea/lorado.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /.node-version: -------------------------------------------------------------------------------- 1 | v18.5.0 2 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | v18.5.0 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | CHANGELOG.md 2 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "es5", 3 | "semi": true, 4 | "singleQuote": false, 5 | "quoteProps": "as-needed", 6 | "jsxSingleQuote": false, 7 | "bracketSpacing": true, 8 | "arrowParens": "avoid", 9 | "proseWrap": "always", 10 | "endOfLine":"auto" 11 | } 12 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # [0.1.0-alpha.8](https://github.com/failfa-st/LoRAdo/compare/v0.1.0-alpha.1...v0.1.0-alpha.8) (2023-08-18) 2 | 3 | 4 | ### Features 5 | 6 | * **upload:** implement better image loading ([#11](https://github.com/failfa-st/LoRAdo/issues/11)) ([1a8958a](https://github.com/failfa-st/LoRAdo/commit/1a8958aebddcc636e9b70347eb57ee21dd6fbc6c)) 7 | 8 | 9 | 10 | # 0.1.0-alpha.7 (2023-08-17) 11 | 12 | 13 | ### Bug Fixes 14 | 15 | * EXDEV ([708ab9d](https://github.com/failfa-st/LoRAdo/commit/708ab9dfb7627b4118fb0ee646b9581222ae1214)) 16 | 17 | 18 | # 0.1.0-alpha.6 (2023-08-17) 19 | 20 | 21 | ### Features 22 | 23 | * better masonry ([1c165ad](https://github.com/failfa-st/LoRAdo/commit/1c165adda2994b676fa990d0232bd1b25d1bb2af)) 24 | 25 | 26 | 27 | # 0.1.0-alpha.5 (2023-08-17) 28 | 29 | 30 | ### Features 31 | 32 | * add support for SD1.5 
([c087459](https://github.com/failfa-st/LoRAdo/commit/c0874596f3e745d88701bd2a9099d954ccd95a78)) 33 | 34 | 35 | # 0.1.0-alpha.4 (2023-08-17) 36 | 37 | 38 | ### Bug Fixes 39 | 40 | * rendering lag ([1082bc2](https://github.com/failfa-st/LoRAdo/commit/1082bc27405f5202e9daa0084156fa73d8f4a34d)) 41 | 42 | # 0.1.0-alpha.3 (2023-08-16) 43 | 44 | 45 | ### Features 46 | 47 | * ad image uploading ([a93fbdc](https://github.com/failfa-st/LoRAdo/commit/a93fbdcfdf8877739c42eb4ed4e2ec467013d0e0)) 48 | * add dataset creation ([a486936](https://github.com/failfa-st/LoRAdo/commit/a4869368502c49cc3766c245f06dad084a4ef0f8)) 49 | * add sample prompts ([9b4efb7](https://github.com/failfa-st/LoRAdo/commit/9b4efb7839d8fb9de11818cffeeb4d1d7cf6994d)) 50 | * allow creating new captions ([08c5810](https://github.com/failfa-st/LoRAdo/commit/08c5810b6b06b1db70f0147ecbe8df6a1557483c)) 51 | * basic UI ([686e438](https://github.com/failfa-st/LoRAdo/commit/686e43844e25d33606d470307dc91465a6032e21)) 52 | * calculate repeats ([c0d8300](https://github.com/failfa-st/LoRAdo/commit/c0d83002b12df579bd7eee946416f1ad4d9d8ae7)) 53 | * projects ([f07bc41](https://github.com/failfa-st/LoRAdo/commit/f07bc41e2d0df8d036c6734eead8e37cd1de2954)) 54 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LoRAdo 2 | 3 | [![Discord](https://img.shields.io/discord/1091306623819059300?color=7289da&label=Discord&logo=discord&logoColor=fff&style=for-the-badge)](https://discord.com/invite/m3TBB9XEkb) 4 | 5 | Prepare datasets for [Kohya_ss](https://github.com/bmaltais/kohya_ss/) (a UI for 6 | https://github.com/kohya-ss/sd-scripts.) 7 | 8 | > We plan to integrate training directly into this tool 9 | 10 | 11 | 12 | - [Intro](#intro) 13 | - [Why?](#why) 14 | - [Screenshots](#screenshots) 15 | - [Features](#features) 16 | - [Getting Started with LoRAdo](#getting-started-with-lorado) 17 | - [Update LoRAdo](#update-lorado) 18 | - [Using LoRAdo](#using-lorado) 19 | - [How LoRAdo Works](#how-lorado-works) 20 | - [Generation examples](#generation-examples) 21 | 22 | 23 | 24 | ## Intro 25 | 26 | Welcome to LoRAdo. This toolkit is designed to streamline the process of LoRAs image generation. 27 | Suitable for both beginners and experienced developers, it simplifies dataset creation. Initially 28 | designed with portraits in mind, LoRAdo is adaptable and works well for various subjects. Key 29 | features include face detection and user-friendly cropping tools, ensuring a practical balance 30 | between user experience and results. 31 | 32 | With LoRAdo, creating datasets for image training is straightforward and efficient. 33 | 34 | ## Why? 35 | 36 | We wanted to provide an easy way to train LoRAs for different use cases. During research, we decided 37 | to build a few small helper functions to help us gather images and crop them as well as create 38 | caption files. This UI offers an easy way to create LoRAs without overthinking it. 39 | 40 | ## Screenshots 41 | 42 | 43 | 44 | 49 | 50 | 55 | 56 | 57 | 58 | 63 | 68 | 69 | 70 |
45 | user interface 46 |
47 | The user interface 48 |
51 | slideshow 52 |
53 | View and configure images 54 |
59 | cropped images 60 |
61 | Project view 62 |
64 | folders 65 |
66 | The prepared folder 67 |
71 | 72 | ## Features 73 | 74 | - Dataset creation 75 | - Captioning tools 76 | - Create and edit projects 77 | - support for SDXL and SD-1.5 78 | - SDXL or SD-1.5 resolution adjustment 79 | - Multiple image resolutions 80 | - [Kohya_ss (UI)](https://github.com/bmaltais/kohya_ss/) config creator 81 | - Regularisation image tools (WIP) 82 | 83 | ## Getting Started with LoRAdo 84 | 85 | 1. **Prerequisites:** 86 | 87 | - Ensure you have `node.js (v18.5.0)` installed. If not, you can download it from 88 | [Node.js official site](https://nodejs.org/). ([use a node version manager](#node-version-managers)) to quickly switch to a different version. 89 | - There is an open issue with tensorflow. Please make sure to install v18 (not above). The 90 | recommended version can be found in [.nvmrc](.nvmrc) 91 | - If there is a problem with tensorflow, please check 92 | [this issue](https://github.com/failfa-st/LoRAdo/issues/1) 93 | 94 | 2. **Clone and Install Dependencies:** 95 | 96 | ```bash 97 | git clone https://github.com/failfa-st/LoRAdo.git 98 | cd LoRAdo 99 | npm install 100 | ``` 101 | 102 | 3. **Running the App:** 103 | 104 | ```bash 105 | npm run build 106 | npm run start 107 | ``` 108 | 109 | Your app should now be running on [http://localhost:3000](http://localhost:3000). Navigate to 110 | this URL in your browser to explore the LoRAdo UI. 111 | 112 | ## Update LoRAdo 113 | 114 | 1. **Stop LoRAdo if it's already running** 115 | 116 | - CTRL+C to terminate the process in the terminal 117 | 118 | 2. **Update the code and dependencies:** 119 | 120 | ```bash 121 | git fetch origin 122 | git reset --hard origin/main 123 | npm install 124 | ``` 125 | 126 | 3. **Run the App:** 127 | 128 | - Execute the commands as shown under _Running the App_ above 129 | 130 | ## Using LoRAdo 131 | 132 | 1. **Step 1:** Begin by uploading your chosen image(s). 133 | 2. **Step 2:** Let LoRAdo's face detection feature scan the image. 134 | - If a face is detected, the system identifies it as a potential focal point for cropping. 135 | - If no face is detected, evaluate if the image is suitable for your needs. 136 | 3. **Step 3:** Add captions to your images directly within the app, enhancing the context and 137 | meaning of each. 138 | 4. **Step 4:** Choose your cropping method: 139 | - Opt for minimal cropping by finding the best-fitting resolution based on the original aspect 140 | ratio, while still considering the detected focal point. 141 | - Or, generate 9 SDXL (3 SD-1.5 respectively) compatible crops for each image. 142 | 5. **Step 5:** With a single click on "prepare", transform your curated images into a dataset , 143 | compatible to [Kohya_ss](https://github.com/bmaltais/kohya_ss/). 144 | 6. **Step 6:** Navigate to the [training folder](training): 145 | - Every dataset here comes with its own `config.json` file. 146 | - These configurations are primed for integration into Kohya_ss. 147 | 148 | ## How LoRAdo Works 149 | 150 | At the heart of LoRAdo is the intent to simplify the complex. 151 | 152 | 1. **Image Optimization:** Start with any image. LoRAdo smartly crops it to fit the ideal SDXL 153 | resolution. For images with faces, they become the focal point, ensuring the best visual result. 154 | Without a face? No worries, we center-crop it perfectly. 155 | 156 | 2. **Multiple Resolutions:** Beyond the default, you have the option to generate images in all 9 157 | SDXL resolutions, as an attempt to increase flexibility without compromising on quality. 158 | 159 | 3. 
**Configuration Made Easy:** LoRAdo generates a configuration file tailored for Kohya_ss. Once 160 | your project is set, integrating it with Kohya_ss is a breeze. Simply load the config under the 161 | LoRA tab, and you're all set to begin training. And if you wish, Kohya_ss offers more granular 162 | configuration options. 163 | 164 | While our approach is guided by a strong opinion on usability, it still leaves room for 165 | customization. As we evolve, expect enhanced configurability. But rest assured, simplicity will 166 | always be our guiding star, ensuring that interacting with LoRAdo remains intuitive. 167 | 168 | Example images via [@anamnesis33](https://unsplash.com/@anamnesis33) 169 | 170 | | Image | Link | 171 | | ------------------------------------------------------------------------------------------------------------------ | --------------------------------------- | 172 | | []() | https://unsplash.com/photos/mqcYKihgfAo | 173 | | []() | https://unsplash.com/photos/06TuQM7RSP4 | 174 | | []() | https://unsplash.com/photos/AUJhl146mBY | 175 | | []() | https://unsplash.com/photos/8OWttYqN47I | 176 | 177 | This tool provides an opinionated configuration and approach to training flexible LoRAs. We are 178 | constantly researching to improve the default settings provided by this tool. Advanced (detailed) 179 | configuration is planned for future releases. 180 | 181 | ## Generation examples 182 | 183 | These examples were generated from a LoRA, trained on a dataset that was prepared with this 184 | tool/approach 185 | 186 | [](assets/generations/example-1.png) 187 | [](assets/generations/example-2.png) 188 | [](assets/generations/example-3.png) 189 | [](assets/generations/example-4.png) 190 | [](assets/generations/example-6.png) 191 | [](assets/generations/example-7.png) 192 | [](assets/generations/example-8.png) 193 | [](assets/generations/example-9.png) 194 | [](assets/generations/example-10.png) 195 | [](assets/generations/example-11.png) 196 | 197 | **Feedback and Support:** Encountered an issue or have a suggestion? Join our 198 | [Discord community](https://discord.com/invite/m3TBB9XEkb) or open an issue on GitHub. We'd love to 199 | hear from you! 200 | 201 | ## Node Version Managers 202 | 203 | Node.js has a [list of alternative installation options](https://nodejs.org/en/download/package-manager#nvm). 
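The repository pins its Node.js version in `.nvmrc` and `.node-version` (v18.5.0), and most of the managers listed below can read those files directly. A quick sketch using nvm; the other managers offer equivalent commands:

```bash
# Run from the repository root; nvm resolves the version from .nvmrc.
nvm install   # installs v18.5.0 if it is not present yet
nvm use       # activates it for the current shell
```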
204 | 205 | **Cross Platform** 206 | 207 | - [fnm](https://github.com/Schniz/fnm) 208 | 209 | **Windows** 210 | 211 | - [nvm-windows](https://github.com/coreybutler/nvm-windows) 212 | - [nodist](https://github.com/marcelklehr/nodist) 213 | - [nvs](https://github.com/jasongin/nvs) 214 | 215 | **macOS/Linux** 216 | 217 | - [nvm](https://github.com/nvm-sh/nvm) 218 | - [n](https://github.com/tj/n) 219 | 220 | -------------------------------------------------------------------------------- /assets/crop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/crop.png -------------------------------------------------------------------------------- /assets/folder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/folder.png -------------------------------------------------------------------------------- /assets/generations/example-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-1.png -------------------------------------------------------------------------------- /assets/generations/example-10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-10.png -------------------------------------------------------------------------------- /assets/generations/example-11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-11.png -------------------------------------------------------------------------------- /assets/generations/example-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-2.png -------------------------------------------------------------------------------- /assets/generations/example-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-3.png -------------------------------------------------------------------------------- /assets/generations/example-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-4.png -------------------------------------------------------------------------------- /assets/generations/example-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-5.png -------------------------------------------------------------------------------- /assets/generations/example-6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-6.png 
-------------------------------------------------------------------------------- /assets/generations/example-7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-7.png -------------------------------------------------------------------------------- /assets/generations/example-8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-8.png -------------------------------------------------------------------------------- /assets/generations/example-9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/generations/example-9.png -------------------------------------------------------------------------------- /assets/projects.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/projects.png -------------------------------------------------------------------------------- /assets/slideshow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/slideshow.png -------------------------------------------------------------------------------- /assets/ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blib-la/LoRAdo/d3138e11d6c98e8ad7fe73c2a9d7fea5474ee8ed/assets/ui.png -------------------------------------------------------------------------------- /client/components/FaceDetectionImage.tsx: -------------------------------------------------------------------------------- 1 | import { Box } from "@mui/joy"; 2 | import * as faceapi from "@vladmandic/face-api/dist/face-api.esm-nobundle.js"; 3 | import type { ImageProps } from "next/image"; 4 | import Image from "next/image"; 5 | import { useEffect, useRef, useState } from "react"; 6 | 7 | import type { FaceBox } from "@/types"; 8 | 9 | export default function FaceDetectionImage({ 10 | alt, 11 | onFace, 12 | noDetection, 13 | faceBox, 14 | ...props 15 | }: ImageProps & { onFace?(hasFace: FaceBox): void; noDetection?: boolean; faceBox?: FaceBox }) { 16 | const [box, setBox] = useState(faceBox); 17 | const [modelsLoaded, setModelsLoaded] = useState(false); 18 | const [imageLoaded, setImageLoaded] = useState(false); 19 | const imgRef = useRef(null); 20 | 21 | useEffect(() => { 22 | if (!noDetection) { 23 | const loadModels = async () => { 24 | const MODEL_URL = "/face-api/models"; 25 | await faceapi.nets.tinyFaceDetector.load(MODEL_URL); 26 | await faceapi.nets.faceLandmark68Net.load(MODEL_URL); 27 | await faceapi.nets.faceRecognitionNet.load(MODEL_URL); 28 | }; 29 | 30 | loadModels().then(() => { 31 | setModelsLoaded(true); 32 | }); 33 | } 34 | }, [noDetection]); 35 | 36 | useEffect(() => { 37 | const detectFace = async () => { 38 | if (imgRef.current && modelsLoaded && imageLoaded) { 39 | const detections = await faceapi.detectAllFaces( 40 | imgRef.current, 41 | new faceapi.TinyFaceDetectorOptions() 42 | ); 43 | if (detections.length > 0) { 44 | const detectionBox = detections[0].box; 45 | 46 | // Use naturalWidth 
and naturalHeight to get original image dimensions 47 | const xPercentage = (detectionBox.x / imgRef.current.naturalWidth) * 100; 48 | const yPercentage = (detectionBox.y / imgRef.current.naturalHeight) * 100; 49 | const widthPercentage = 50 | (detectionBox.width / imgRef.current.naturalWidth) * 100; 51 | const heightPercentage = 52 | (detectionBox.height / imgRef.current.naturalHeight) * 100; 53 | 54 | setBox({ 55 | xPercentage, 56 | yPercentage, 57 | widthPercentage, 58 | heightPercentage, 59 | }); 60 | if (onFace) { 61 | onFace({ 62 | xPercentage, 63 | yPercentage, 64 | widthPercentage, 65 | heightPercentage, 66 | }); 67 | } 68 | } else if (onFace) { 69 | setBox({ 70 | xPercentage: 10, 71 | yPercentage: 10, 72 | widthPercentage: 80, 73 | heightPercentage: 80, 74 | }); 75 | onFace({ 76 | xPercentage: 10, 77 | yPercentage: 10, 78 | widthPercentage: 80, 79 | heightPercentage: 80, 80 | }); 81 | } 82 | } 83 | }; 84 | 85 | if (!noDetection) { 86 | detectFace(); 87 | } 88 | }, [modelsLoaded, imageLoaded, noDetection]); 89 | 90 | return ( 91 | 100 | {alt} setImageLoaded(true)} 108 | /> 109 | {box && ( 110 |
122 | )} 123 |
124 | ); 125 | } 126 | -------------------------------------------------------------------------------- /client/components/FileUpload.tsx: -------------------------------------------------------------------------------- 1 | import PermMediaIcon from "@mui/icons-material/PermMedia"; 2 | import { Typography, Sheet, Box } from "@mui/joy"; 3 | import { nanoid } from "nanoid"; 4 | import type { DragEvent } from "react"; 5 | 6 | import type { ImageData } from "@/types"; 7 | import { resizeImage } from "@/utils/traverseFileTree"; 8 | interface FileUploadProps { 9 | min: number; 10 | ok: number; 11 | recommended: number; 12 | onDrop(event: DragEvent): void; 13 | onLoad(imageData: ImageData): void; 14 | } 15 | export default function FileUpload({ onDrop, onLoad, min, ok, recommended }: FileUploadProps) { 16 | return ( 17 | e.preventDefault()} 34 | > 35 | 46 | 47 | 48 | 49 | 50 | Drop files or folders here or click to select files from disk. 51 | 52 | 53 | Minimum: {min} image{min === 1 ? "" : "s"} 54 | 55 | 56 | Better: {ok} images or more 57 | 58 | 59 | Recommended: {recommended} images or more 60 | 61 | 62 | 63 | { 74 | if (event.target.files) { 75 | Array.from(event.target.files).forEach(file => { 76 | if (file.type.startsWith("image/")) { 77 | const reader = new FileReader(); 78 | reader.onload = event => { 79 | const image = new Image(); 80 | image.src = event.target!.result as string; 81 | image.onload = async () => { 82 | const maxWidth = 300; 83 | const resizedDataUrl = await resizeImage( 84 | image, 85 | maxWidth, 86 | maxWidth * (1535 / 640) // SDXL max 87 | ); 88 | onLoad({ 89 | id: nanoid(), 90 | data: image.src, 91 | src: resizedDataUrl, 92 | name: file.name, 93 | width: image.width, 94 | height: image.height, 95 | caption: "", 96 | }); 97 | }; 98 | }; 99 | 100 | reader.readAsDataURL(file); 101 | } 102 | }); 103 | } 104 | }} 105 | /> 106 | 107 | ); 108 | } 109 | -------------------------------------------------------------------------------- /client/components/Header.tsx: -------------------------------------------------------------------------------- 1 | import { Option, Select, Sheet } from "@mui/joy"; 2 | import { useRouter } from "next/router"; 3 | 4 | export function plural(word: string, count: number, singular = "", plural = "s") { 5 | if (count === 1) { 6 | return "1 " + word; 7 | } 8 | 9 | if (singular === "") { 10 | return `${count} ${word + plural}`; 11 | } 12 | 13 | return `${count} ${word.replace(new RegExp(`${singular}$`), plural)}`; 14 | } 15 | 16 | export function Header({ directories }: { directories: { fullPath: string; id: string }[] }) { 17 | const { 18 | push, 19 | query: { id = "new" }, 20 | } = useRouter(); 21 | return ( 22 | 34 | {plural("project", directories.length)} 35 | 50 | 51 | ); 52 | } 53 | -------------------------------------------------------------------------------- /client/components/ImageItem.tsx: -------------------------------------------------------------------------------- 1 | import CheckIcon from "@mui/icons-material/CheckCircle"; 2 | import DeleteIcon from "@mui/icons-material/Delete"; 3 | import FileUploadIcon from "@mui/icons-material/FileUpload"; 4 | import SaveIcon from "@mui/icons-material/Save"; 5 | import { 6 | IconButton, 7 | Typography, 8 | Card, 9 | CardContent, 10 | Box, 11 | Textarea, 12 | FormControl, 13 | FormLabel, 14 | FormHelperText, 15 | CircularProgress, 16 | useTheme, 17 | } from "@mui/joy"; 18 | import dynamic from "next/dynamic"; 19 | import type { ChangeEventHandler } from "react"; 20 | import { useState } from "react"; 
21 | 22 | import type { FaceBox, ImageData } from "@/types"; 23 | 24 | const FaceDetectionImage = dynamic(() => import("@/components/FaceDetectionImage"), { 25 | ssr: false, 26 | }); 27 | export interface ImageItemProps { 28 | upload?: boolean; 29 | modified?: boolean; 30 | demo?: boolean; 31 | image: ImageData; 32 | onRemove?: () => void; 33 | onSave?: () => void; 34 | onOpen?: () => void; 35 | onFace?: (faceBox: FaceBox) => void; 36 | onCaptionChange?: ChangeEventHandler; 37 | } 38 | 39 | export function StateIcon({ loading, done }: { loading?: boolean; done?: boolean }) { 40 | const theme = useTheme(); 41 | if (loading) { 42 | return ; 43 | } 44 | 45 | return done ? ( 46 | 47 | ) : ( 48 | 49 | ); 50 | } 51 | 52 | export default function ImageItem({ 53 | image, 54 | demo, 55 | modified, 56 | onRemove, 57 | onSave, 58 | onOpen, 59 | onFace, 60 | onCaptionChange, 61 | }: ImageItemProps) { 62 | const [faceDetection, setFaceDetection] = useState(Boolean(image.faceBox)); 63 | 64 | const hasGoodSize = Math.min(image.width, image.height) >= 1536; 65 | 66 | return ( 67 | 77 |
78 | { 79 | 83 | } 84 | sx={{ mr: 6 }} 85 | > 86 | {image.name} 87 | 88 | } 89 | 90 | {onRemove && !modified && ( 91 | 99 | 100 | 101 | )} 102 | {onSave && modified && ( 103 | 111 | 112 | 113 | )} 114 |
115 | 116 |
117 | 118 | Dimensions: {image.width}x{image.height} 119 | 120 |
121 |
122 | 135 | { 147 | setFaceDetection(true); 148 | if (onFace) { 149 | onFace(faceBox); 150 | } 151 | }} 152 | /> 153 | 154 | 155 | Caption 156 |