├── rfcs ├── put_rfcs_here.txt ├── 0000-corenodes.md ├── 0003-widget-input-socket.md ├── 0006-trainer.md ├── 0004-widget-values-format.md ├── 0001-object_info_v2.md ├── 0002-litegraph_native_reroute.md └── 0005-subgraph.md ├── specifications ├── api.py ├── README.md ├── pyproject.toml ├── api.js ├── node_def.py └── node_def.json ├── .github ├── ISSUE_TEMPLATE │ └── config.yml ├── pull_request_template.md └── workflows │ ├── rfc-discussion.yml │ └── sync-rfc-discussion.yml ├── 0000-template.md ├── .gitignore └── README.md /rfcs/put_rfcs_here.txt: -------------------------------------------------------------------------------- 1 | Please put your RFCs here. -------------------------------------------------------------------------------- /specifications/api.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | ''' 4 | ComfyUI API available to custom nodes. 5 | ''' 6 | class ComfyUI(ABC): 7 | @abstractmethod 8 | def some_method(self): 9 | pass 10 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: 🙏 Question or Help 4 | url: https://github.com/Comfy-Org/rfcs/discussions/new 5 | about: If you have a question or need help, ask a question on the discussion forums. 6 | -------------------------------------------------------------------------------- /specifications/README.md: -------------------------------------------------------------------------------- 1 | # ComfyUI Current Specifications 2 | 3 | This directory contains the current version of ComfyUI's manifest files and API definitions. 4 | 5 | ## Files 6 | 7 | - `api.py` - Python interface for ComfyUI's backend API 8 | - `api.ts` - TypeScript interface for ComfyUI's frontend API 9 | - `node_def.py` - How to define a custom node in Python 10 | - `node_def.json` - How a custom node is saved in a workflow json. 11 | - `pyproject.toml` - Manifest file for custom node pack. 12 | -------------------------------------------------------------------------------- /specifications/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "comfyui-node" # unique namespace for the node pack 3 | version = "0.1.0" 4 | description = "ComfyUI Node that does" 5 | authors = [ 6 | {name = "", email = ""} 7 | ] 8 | readme = "README.md" 9 | license = {file = "LICENSE"} 10 | dependencies = [] 11 | 12 | [project.urls] 13 | bugs = "" 14 | homepage = "" 15 | repository = "" 16 | 17 | 18 | [tool.comfy] 19 | PublisherId = "" # Registry ID 20 | DisplayName = "" # Display Name for ComfyUI Registry 21 | Icon = "" # Icon URL for ComfyUI Registry 22 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Summary 2 | 3 | 7 | 8 | ## Links 9 | 10 | 14 | 15 | - [Full Rendered Proposal]() 16 | 17 | - [Discussion Thread]() 18 | 19 | 22 | 23 | --- 24 | 25 | **Important: Do NOT comment on this PR. Please use the discussion thread linked above to provide feedback, as it provides branched discussions that are easier to follow. 
This also makes the edit history of the PR clearer.** 26 | -------------------------------------------------------------------------------- /0000-template.md: -------------------------------------------------------------------------------- 1 | # RFC: \ 2 | 3 | - Start Date: (fill me in with today's date, YYYY-MM-DD) 4 | - Target Major Version: (1.x) 5 | - Reference Issues: (fill in existing related issues, if any) 6 | - Implementation PR: (leave this empty) 7 | 8 | ## Summary 9 | 10 | Brief explanation of the feature. 11 | 12 | ## Basic example 13 | 14 | If the proposal involves a new or changed API, include a basic code example. 15 | Omit this section if it's not applicable. 16 | 17 | ## Motivation 18 | 19 | Why are we doing this? What use cases does it support? What is the expected 20 | outcome? 21 | 22 | Please focus on explaining the motivation so that if this RFC is not accepted, 23 | the motivation could be used to develop alternative solutions. In other words, 24 | enumerate the constraints you are trying to solve without coupling them too 25 | closely to the solution you have in mind. 26 | 27 | ## Detailed design 28 | 29 | This is the bulk of the RFC. Explain the design in enough detail for somebody 30 | familiar with ComfyUI to understand, and for somebody familiar with the 31 | implementation to implement. This should get into specifics and corner-cases, 32 | and include examples of how the feature is used. Any new terminology should be 33 | defined here. 34 | 35 | ## Drawbacks 36 | 37 | Why should we *not* do this? Please consider: 38 | 39 | - implementation cost, both in terms of code size and complexity 40 | - whether the proposed feature can be implemented in user space 41 | - the impact on teaching people ComfyUI 42 | - integration of this feature with other existing and planned features 43 | - cost of migrating existing ComfyUI applications (is it a breaking change?) 44 | 45 | There are tradeoffs to choosing any path. Attempt to identify them here. 46 | 47 | ## Alternatives 48 | 49 | What other designs have been considered? What is the impact of not doing this? 50 | 51 | ## Adoption strategy 52 | 53 | If we implement this proposal, how will existing ComfyUI users and developers adopt it? Is 54 | this a breaking change? How will this affect other projects in the ComfyUI ecosystem? 55 | 56 | ## Unresolved questions 57 | 58 | Optional, but suggested for first drafts. What parts of the design are still 59 | TBD?
60 | -------------------------------------------------------------------------------- /specifications/api.js: -------------------------------------------------------------------------------- 1 | /** 2 | * The main app instance that you'll use to register tabs and access functionality 3 | */ 4 | export interface AppInstance { 5 | /** Use this to register new sidebar tabs */ 6 | extensionManager: ExtensionManager; 7 | /** Access the current workflow graph */ 8 | graph: Graph; 9 | /** Access the API for events and other functionality */ 10 | api: API; 11 | } 12 | 13 | /** 14 | * Configuration for a sidebar tab - use this with app.extensionManager.registerSidebarTab() 15 | */ 16 | export interface SidebarTabConfig { 17 | /** Unique identifier for the tab */ 18 | id: string; 19 | /** Icon class for the tab button (e.g., 'pi pi-compass', 'mdi mdi-robot', 'fa-solid fa-star') */ 20 | icon: string; 21 | /** Title text for the tab */ 22 | title: string; 23 | /** Optional tooltip text shown on hover */ 24 | tooltip?: string; 25 | /** Tab type (usually "custom") */ 26 | type: string; 27 | /** Function that populates the tab content. Can return a cleanup function. */ 28 | render: (element: HTMLElement) => void | (() => void); 29 | } 30 | 31 | /** 32 | * Example usage: 33 | * 34 | * ```typescript 35 | * // Basic tab registration 36 | * app.extensionManager.registerSidebarTab({ 37 | * id: "customSidebar", 38 | * icon: "pi pi-compass", 39 | * title: "Custom Tab", 40 | * tooltip: "My Custom Sidebar Tab", 41 | * type: "custom", 42 | * render: (el) => { 43 | * el.innerHTML = '
<div>This is my custom sidebar content</div>
'; 44 | * } 45 | * }); 46 | * 47 | * // React component example 48 | * app.extensionManager.registerSidebarTab({ 49 | * id: "reactSidebar", 50 | * icon: "mdi mdi-react", 51 | * title: "React Tab", 52 | * type: "custom", 53 | * render: (el) => { 54 | * const container = document.createElement("div"); 55 | * el.appendChild(container); 56 | * ReactDOM.createRoot(container).render(); 57 | * } 58 | * }); 59 | * ``` 60 | */ 61 | 62 | // Additional interfaces for internal use 63 | export interface ExtensionManager { 64 | registerSidebarTab(config: SidebarTabConfig): void; 65 | } 66 | 67 | export interface Graph { 68 | _nodes: any[]; 69 | links: Record; 70 | } 71 | 72 | export interface API { 73 | addEventListener(event: string, callback: () => void): void; 74 | removeEventListener(event: string, callback: () => void): void; 75 | } 76 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # UV 98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | #uv.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 
107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | 110 | # pdm 111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 112 | #pdm.lock 113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 114 | # in version control. 115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 116 | .pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 121 | __pypackages__/ 122 | 123 | # Celery stuff 124 | celerybeat-schedule 125 | celerybeat.pid 126 | 127 | # SageMath parsed files 128 | *.sage.py 129 | 130 | # Environments 131 | .env 132 | .venv 133 | env/ 134 | venv/ 135 | ENV/ 136 | env.bak/ 137 | venv.bak/ 138 | 139 | # Spyder project settings 140 | .spyderproject 141 | .spyproject 142 | 143 | # Rope project settings 144 | .ropeproject 145 | 146 | # mkdocs documentation 147 | /site 148 | 149 | # mypy 150 | .mypy_cache/ 151 | .dmypy.json 152 | dmypy.json 153 | 154 | # Pyre type checker 155 | .pyre/ 156 | 157 | # pytype static type analyzer 158 | .pytype/ 159 | 160 | # Cython debug symbols 161 | cython_debug/ 162 | 163 | # PyCharm 164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 166 | # and can be added to the global gitignore or merged into this file. For a more nuclear 167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 168 | #.idea/ 169 | 170 | # PyPI configuration file 171 | .pypirc 172 | -------------------------------------------------------------------------------- /rfcs/0000-corenodes.md: -------------------------------------------------------------------------------- 1 | # RFC: Core Nodes Expansion 2 | 3 | - Start Date: 2025-02-18 4 | - Target Major Version: TBD 5 | - Reference Issues: 6 | - Implementation PR: 7 | 8 | ## Summary 9 | 10 | This RFC focuses on adding some long-requested nodes to core ComfyUI, both unifying their behavior across the many existing custom node implementations and making the core node features more robust. Mainly, the nodes proposed here would involve string manipulation, int/float manipulation via a unified 'number' type, and preview options for masks, strings, and miscellaneous types. 11 | 12 | ## Motivation 13 | 14 | If something is implemented many times in different custom nodes with the same goal, it is clear it is a feature that should be in core. There are many math nodes, string nodes, preview nodes, etc., many with slight variations in behavior. 15 | 16 | In the past, one of the counterarguments for having basic nodes like this was to avoid ComfyUI becoming a 'visual programming' UI, but at its core that is what ComfyUI actually is - abstracting away the coding details of generative AI and leaving the bits relevant to its operation. Since these custom nodes exist anyway, that argument is void since users still have access to such features in ComfyUI. 17 | 18 | Having multiple custom nodes doing the same thing but slightly differently, or relevant nodes that should be in core being part of massive node packs, makes for a frustrating experience. Going forward, nodes that do simple things across multiple node packs should be considered for RFCs that bring them into core ComfyUI.
19 | 20 | ## Detailed design 21 | 22 | This will be more of a list of nodes considered to have a core implementation. 23 | 24 | ### Preview Nodes 25 | 26 | 1. Preview Mask 27 | 28 | This would be identical to the Preview Image node, except it would take a mask as input. Alternatively, the existing Preview Image node could have its ```image``` input be defined as ```(IMAGE,MASK)``` so that it could accept both IMAGE and MASK. The code would simply need to be updated to support the tensor format to display it. A Preview Mask node exists in the [ComfyUI_essentials](https://github.com/cubiq/ComfyUI_essentials) node pack. 29 | 30 | 2. Preview Any 31 | 32 | Strings, integers, floats, etc. should be previewable. This is long requested and has a number of existing implementations. In the case of the SaveImage node in nodes.py, ```return { "ui": { "images": results } }``` is used to report the images that should be seen in the UI; something similar would need to be done to support strings, and then arbitrary types could also just have their string representations displayed (a rough sketch of this pattern is included just before the Alternatives section below). 33 | 34 | From a quick Google search, one node pack that includes a text preview is [ComfyUI_Custom_Nodes_AlekPet](https://github.com/AlekPet/ComfyUI_Custom_Nodes_AlekPet/blob/master/ExtrasNode/extras_node.py), with 1k+ stars. 35 | 36 | ### String Nodes 37 | 38 | While core ComfyUI uses string inputs, it technically has no nodes that output strings, and that was the reason why no core string manipulation nodes were implemented. That should change, given how often strings are used, whether for prompts or filenames. 39 | 40 | 1. Basic String Manipulation 41 | 42 | There should be an assortment of string nodes that do common, basic string operations, such as: String Concatenate, String Replace, String Trim, String Select (beginning index, end index). The list is not exhaustive; more basic operations can be supported. Existing custom nodes that do this sort of thing should be looked at, to make sure the core implementation supports the desired features. 43 | 44 | ### Number Nodes (Math) 45 | 46 | Many, many workflows perform some math operations on ints or floats, whether to adjust cropping or get an even division of something. 47 | 48 | It would be redundant to have nodes specifically for ints and floats - ideally, 'math nodes' should output a ```NUMBER``` type that can be converted to either INT or FLOAT, and 'number' inputs would really be typed as ```(INT,FLOAT,NUMBER)``` for easy use. 49 | 50 | 1. Basic Math 51 | 52 | Examples: Add (variable inputs), Subtract (variable inputs), Multiply (variable inputs), Divide (variable inputs), Power, Floor, Ceiling, Round. The list is not exhaustive; more basic operations can be added. Existing custom nodes that do this sort of thing should be looked at, to make sure the core implementation supports the desired features. 53 | 54 | 2. Convert From Number 55 | 56 | Takes a NUMBER as input, and has two outputs: INT and FLOAT. 57 | 58 | ## Drawbacks 59 | 60 | The primary drawback to math and string helpers is that since there are multiple node packs that already do these things, adding them to core in a way contributes to the 'yet another standard' problem. If the core ComfyUI implementation of these nodes is not satisfactory, it will result in even more nodes that do-the-same-thing-but-slightly-differently. 61 | 62 | The execution of this RFC will ultimately determine whether this drawback is valid or not.
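As noted in the Preview Any item above, here is a minimal sketch of what a core node for previewing arbitrary values could look like, reusing the same ```{"ui": ...}``` reporting pattern SaveImage uses for images. The class name, the ```"*"``` wildcard input type, and the ```"text"``` UI key are illustrative assumptions only, not a settled design.

```python
class PreviewAny:
    """Hypothetical core node that displays the string representation of any input."""

    @classmethod
    def INPUT_TYPES(s):
        # "*" is the wildcard convention many custom nodes use to accept any type;
        # whether core would adopt this or a dedicated ANY/NUMBER type is an open question.
        return {"required": {"value": ("*", {})}}

    RETURN_TYPES = ()
    FUNCTION = "preview"
    OUTPUT_NODE = True
    CATEGORY = "utils"

    def preview(self, value):
        # Mirror SaveImage's {"ui": {"images": results}} pattern, but report text;
        # the "text" key assumes matching frontend support would be added.
        return {"ui": {"text": [str(value)]}}
```

A Convert From Number node would have the same shape: a single NUMBER input, ```RETURN_TYPES = ("INT", "FLOAT")```, and an entry-point function returning ```(int(value), float(value))```.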
63 | 64 | ## Alternatives 65 | 66 | Multiple custom nodes already exist that do everything this RFC proposes, in various subsets. 67 | 68 | ## Adoption strategy 69 | 70 | Existing nodes would not be impacted, but new workflows can be created to preview/manipulate strings/do math without the need for multiple custom node packs. Nodes that wish to preview strings or arbitrary types as strings would no longer need to implement their own methods of displaying values in the UI. 71 | 72 | ## Unresolved questions 73 | 74 | Optional, but suggested for first drafts. What parts of the design are still 75 | TBD? 76 | -------------------------------------------------------------------------------- /.github/workflows/rfc-discussion.yml: -------------------------------------------------------------------------------- 1 | name: Create RFC Discussion 2 | 3 | on: 4 | pull_request_target: 5 | types: [opened] 6 | paths: 7 | - 'rfcs/**.md' 8 | 9 | jobs: 10 | create-discussion: 11 | runs-on: ubuntu-latest 12 | permissions: 13 | pull-requests: write 14 | discussions: write 15 | 16 | steps: 17 | - name: Get Changed Files 18 | id: changed-files 19 | uses: actions/github-script@v7 20 | with: 21 | script: | 22 | const { data: files } = await github.rest.pulls.listFiles({ 23 | owner: context.repo.owner, 24 | repo: context.repo.repo, 25 | pull_number: context.issue.number 26 | }); 27 | 28 | core.info('All changed files: ' + JSON.stringify(files.map(f => f.filename))); 29 | 30 | const mdFile = files.find(file => file.filename.startsWith('rfcs/') && file.filename.endsWith('.md')); 31 | if (!mdFile) { 32 | throw new Error('No markdown file found in rfcs directory'); 33 | } 34 | core.info('Found markdown file: ' + mdFile.filename); 35 | core.setOutput('filename', mdFile.filename); 36 | 37 | - name: Get RFC Content 38 | id: get-rfc-content 39 | uses: actions/github-script@v7 40 | with: 41 | script: | 42 | const mdFile = '${{ steps.changed-files.outputs.filename }}'; 43 | const { data: fileContent } = await github.rest.repos.getContent({ 44 | owner: context.repo.owner, 45 | repo: context.repo.repo, 46 | path: mdFile, 47 | ref: context.payload.pull_request.head.sha 48 | }); 49 | 50 | const content = Buffer.from(fileContent.content, 'base64').toString('utf8'); 51 | core.setOutput('content', content); 52 | 53 | - name: Create a new GitHub Discussion 54 | id: create-discussion 55 | uses: abirismyname/create-discussion@v1.x 56 | env: 57 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 58 | with: 59 | title: "RFC Discussion: ${{ github.event.pull_request.title }}" 60 | body: | 61 | # RFC Discussion: ${{ github.event.pull_request.title }} 62 | 63 | **Author:** @${{ github.event.pull_request.user.login }} | **Status:** 🟡 Under Review 64 | 65 | ## 📋 Quick Links 66 | - 🔧 [Source PR #${{ github.event.pull_request.number }}](${{ github.event.pull_request.html_url }}) 67 | - 📝 [View Changes](${{ github.event.pull_request.html_url }}/files) 68 | - 📖 [Rendered Proposal](https://github.com/${{ github.repository }}/blob/${{ github.event.pull_request.head.ref }}/${{ steps.changed-files.outputs.filename }}) 69 | 70 | --- 71 | 72 | ## 📄 Current Proposal 73 | 74 | > **Last Updated:** ${{ github.event.pull_request.updated_at }} 75 | > **Commit:** [`${{ github.event.pull_request.head.sha }}`](${{ github.event.pull_request.head.repo.html_url }}/commit/${{ github.event.pull_request.head.sha }}) 76 | 77 | 78 | ${{ steps.get-rfc-content.outputs.content }} 79 | 80 | 81 | --- 82 | **💬 Discussion Guidelines:** Share feedback, concerns, and suggestions below. 
Use reply threads to keep conversations organized. 83 | repository-id: "R_kgDONlIMAA" 84 | category-id: "DIC_kwDONlIMAM4Cl3Tj" 85 | 86 | - name: Update PR description 87 | uses: actions/github-script@v7 88 | with: 89 | github-token: ${{ secrets.GITHUB_TOKEN }} 90 | script: | 91 | const mdFile = process.env.MD_FILE; 92 | core.info('Markdown file from env: ' + mdFile); 93 | core.info('PR head ref: ' + context.payload.pull_request.head.ref); 94 | 95 | const prBody = context.payload.pull_request.body; 96 | const renderedUrl = `https://github.com/${context.repo.owner}/${context.repo.repo}/blob/${context.payload.pull_request.head.ref}/${mdFile}`; 97 | 98 | core.info('Generated URL: ' + renderedUrl); 99 | 100 | const updatedBody = prBody 101 | .replace('[Full Rendered Proposal]()', `[Full Rendered Proposal](${renderedUrl})`) 102 | .replace('[Discussion Thread]()', `[Discussion Thread](${process.env.DISCUSSION_URL})`); 103 | 104 | // Add labels 105 | await github.rest.issues.addLabels({ 106 | owner: context.repo.owner, 107 | repo: context.repo.repo, 108 | issue_number: context.issue.number, 109 | labels: ['rfc', 'pending'] 110 | }); 111 | 112 | // Lock PR comments 113 | await github.rest.issues.lock({ 114 | owner: context.repo.owner, 115 | repo: context.repo.repo, 116 | issue_number: context.issue.number, 117 | lock_reason: 'resolved' 118 | }); 119 | 120 | // Update PR body 121 | await github.rest.pulls.update({ 122 | owner: context.repo.owner, 123 | repo: context.repo.repo, 124 | pull_number: context.issue.number, 125 | body: updatedBody 126 | }); 127 | env: 128 | DISCUSSION_URL: ${{ steps.create-discussion.outputs.discussion-url }} 129 | MD_FILE: ${{ steps.changed-files.outputs.filename }} -------------------------------------------------------------------------------- /.github/workflows/sync-rfc-discussion.yml: -------------------------------------------------------------------------------- 1 | name: Sync RFC Discussion 2 | 3 | on: 4 | pull_request_target: 5 | types: [synchronize] 6 | paths: 7 | - 'rfcs/**.md' 8 | 9 | jobs: 10 | sync-discussion: 11 | runs-on: ubuntu-latest 12 | permissions: 13 | discussions: write 14 | pull-requests: read 15 | 16 | steps: 17 | - name: Get Changed Files 18 | id: changed-files 19 | uses: actions/github-script@v7 20 | with: 21 | script: | 22 | const { data: files } = await github.rest.pulls.listFiles({ 23 | owner: context.repo.owner, 24 | repo: context.repo.repo, 25 | pull_number: context.issue.number 26 | }); 27 | 28 | const mdFile = files.find(file => file.filename.startsWith('rfcs/') && file.filename.endsWith('.md')); 29 | if (!mdFile) { 30 | core.info('No RFC markdown file found, skipping sync'); 31 | return; 32 | } 33 | core.setOutput('filename', mdFile.filename); 34 | 35 | - name: Get RFC Content 36 | id: get-rfc-content 37 | if: steps.changed-files.outputs.filename 38 | uses: actions/github-script@v7 39 | with: 40 | script: | 41 | const mdFile = '${{ steps.changed-files.outputs.filename }}'; 42 | const { data: fileContent } = await github.rest.repos.getContent({ 43 | owner: context.repo.owner, 44 | repo: context.repo.repo, 45 | path: mdFile, 46 | ref: context.payload.pull_request.head.sha 47 | }); 48 | 49 | const content = Buffer.from(fileContent.content, 'base64').toString('utf8'); 50 | core.setOutput('content', content); 51 | 52 | - name: Find Discussion 53 | id: find-discussion 54 | if: steps.changed-files.outputs.filename 55 | uses: actions/github-script@v7 56 | with: 57 | script: | 58 | const query = ` 59 | query($owner: String!, $repo: String!) 
{ 60 | repository(owner: $owner, name: $repo) { 61 | discussions(first: 50, orderBy: {field: CREATED_AT, direction: DESC}) { 62 | nodes { 63 | id 64 | title 65 | body 66 | } 67 | } 68 | } 69 | } 70 | `; 71 | 72 | const variables = { 73 | owner: context.repo.owner, 74 | repo: context.repo.repo 75 | }; 76 | 77 | const result = await github.graphql(query, variables); 78 | const discussion = result.repository.discussions.nodes.find(d => 79 | d.title === `RFC Discussion: ${context.payload.pull_request.title}` 80 | ); 81 | 82 | if (discussion) { 83 | core.setOutput('discussion_id', discussion.id); 84 | core.info(`Found discussion: ${discussion.id}`); 85 | } else { 86 | core.info('Discussion not found'); 87 | } 88 | 89 | - name: Update Discussion Content 90 | if: steps.find-discussion.outputs.discussion_id && steps.changed-files.outputs.filename 91 | uses: actions/github-script@v7 92 | env: 93 | DISCUSSION_ID: ${{ steps.find-discussion.outputs.discussion_id }} 94 | RFC_CONTENT: ${{ steps.get-rfc-content.outputs.content }} 95 | MD_FILE: ${{ steps.changed-files.outputs.filename }} 96 | PR_TITLE: ${{ github.event.pull_request.title }} 97 | PR_AUTHOR: ${{ github.event.pull_request.user.login }} 98 | PR_NUMBER: ${{ github.event.pull_request.number }} 99 | PR_URL: ${{ github.event.pull_request.html_url }} 100 | REPO_NAME: ${{ github.repository }} 101 | HEAD_REF: ${{ github.event.pull_request.head.ref }} 102 | UPDATED_AT: ${{ github.event.pull_request.updated_at }} 103 | HEAD_SHA: ${{ github.event.pull_request.head.sha }} 104 | HEAD_REPO_URL: ${{ github.event.pull_request.head.repo.html_url }} 105 | with: 106 | script: | 107 | const discussionId = process.env.DISCUSSION_ID; 108 | const rfcContent = process.env.RFC_CONTENT; 109 | const mdFile = process.env.MD_FILE; 110 | 111 | // Build the body content parts 112 | const title = `# RFC Discussion: ${process.env.PR_TITLE}`; 113 | const author = `**Author:** @${process.env.PR_AUTHOR} | **Status:** 🟡 Under Review`; 114 | const links = `## 📋 Quick Links 115 | - 🔧 [Source PR #${process.env.PR_NUMBER}](${process.env.PR_URL}) 116 | - 📝 [View Changes](${process.env.PR_URL}/files) 117 | - 📖 [Rendered Proposal](https://github.com/${process.env.REPO_NAME}/blob/${process.env.HEAD_REF}/${mdFile})`; 118 | 119 | const proposalHeader = `## 📄 Current Proposal`; 120 | const metadata = `> **Last Updated:** ${process.env.UPDATED_AT} 121 | > **Commit:** [\`${process.env.HEAD_SHA}\`](${process.env.HEAD_REPO_URL}/commit/${process.env.HEAD_SHA})`; 122 | 123 | const guidelines = `**💬 Discussion Guidelines:** Share feedback, concerns, and suggestions below. Use reply threads to keep conversations organized.`; 124 | 125 | // Combine all parts 126 | const newBody = [ 127 | title, 128 | '', 129 | author, 130 | '', 131 | links, 132 | '', 133 | '---', 134 | '', 135 | proposalHeader, 136 | '', 137 | metadata, 138 | '', 139 | '', 140 | rfcContent, 141 | '', 142 | '', 143 | '---', 144 | guidelines 145 | ].join('\n'); 146 | 147 | const mutation = ` 148 | mutation($discussionId: ID!, $body: String!) 
{ 149 | updateDiscussion(input: { 150 | discussionId: $discussionId, 151 | body: $body 152 | }) { 153 | discussion { 154 | id 155 | title 156 | } 157 | } 158 | } 159 | `; 160 | 161 | const variables = { 162 | discussionId: discussionId, 163 | body: newBody 164 | }; 165 | 166 | try { 167 | const result = await github.graphql(mutation, variables); 168 | core.info(`Successfully updated discussion: ${result.updateDiscussion.discussion.id}`); 169 | } catch (error) { 170 | core.setFailed(`Failed to update discussion: ${error.message}`); 171 | } -------------------------------------------------------------------------------- /specifications/node_def.py: -------------------------------------------------------------------------------- 1 | 2 | class Example: 3 | """ 4 | A example node 5 | 6 | Class methods 7 | ------------- 8 | INPUT_TYPES (dict): 9 | Tell the main program input parameters of nodes. 10 | IS_CHANGED: 11 | optional method to control when the node is re executed. 12 | 13 | Attributes 14 | ---------- 15 | RETURN_TYPES (`tuple`): 16 | The type of each element in the output tuple. 17 | RETURN_NAMES (`tuple`): 18 | Optional: The name of each output in the output tuple. 19 | FUNCTION (`str`): 20 | The name of the entry-point method. For example, if `FUNCTION = "execute"` then it will run Example().execute() 21 | OUTPUT_NODE ([`bool`]): 22 | If this node is an output node that outputs a result/image from the graph. The SaveImage node is an example. 23 | The backend iterates on these output nodes and tries to execute all their parents if their parent graph is properly connected. 24 | Assumed to be False if not present. 25 | CATEGORY (`str`): 26 | The category the node should appear in the UI. 27 | DEPRECATED (`bool`): 28 | Indicates whether the node is deprecated. Deprecated nodes are hidden by default in the UI, but remain 29 | functional in existing workflows that use them. 30 | EXPERIMENTAL (`bool`): 31 | Indicates whether the node is experimental. Experimental nodes are marked as such in the UI and may be subject to 32 | significant changes or removal in future versions. Use with caution in production workflows. 33 | execute(s) -> tuple || None: 34 | The entry point method. The name of this method must be the same as the value of property `FUNCTION`. 35 | For example, if `FUNCTION = "execute"` then this method's name must be `execute`, if `FUNCTION = "foo"` then it must be `foo`. 36 | """ 37 | def __init__(self): 38 | pass 39 | 40 | @classmethod 41 | def INPUT_TYPES(s): 42 | """ 43 | Return a dictionary which contains config for all input fields. 44 | Some types (string): "MODEL", "VAE", "CLIP", "CONDITIONING", "LATENT", "IMAGE", "INT", "STRING", "FLOAT". 45 | Input types "INT", "STRING" or "FLOAT" are special values for fields on the node. 46 | The type can be a list for selection. 47 | 48 | Returns: `dict`: 49 | - Key input_fields_group (`string`): Can be either required, hidden or optional. A node class must have property `required` 50 | - Value input_fields (`dict`): Contains input fields config: 51 | * Key field_name (`string`): Name of a entry-point method's argument 52 | * Value field_config (`tuple`): 53 | + First value is a string indicate the type of field or a list for selection. 54 | + Second value is a config for type "INT", "STRING" or "FLOAT". 
55 | """ 56 | return { 57 | "required": { 58 | "image": ("IMAGE",), 59 | "int_field": ("INT", { 60 | "default": 0, 61 | "min": 0, #Minimum value 62 | "max": 4096, #Maximum value 63 | "step": 64, #Slider's step 64 | "display": "number", # Cosmetic only: display as "number" or "slider" 65 | "lazy": True # Will only be evaluated if check_lazy_status requires it 66 | }), 67 | "float_field": ("FLOAT", { 68 | "default": 1.0, 69 | "min": 0.0, 70 | "max": 10.0, 71 | "step": 0.01, 72 | "round": 0.001, #The value representing the precision to round to, will be set to the step value by default. Can be set to False to disable rounding. 73 | "display": "number", 74 | "lazy": True 75 | }), 76 | "print_to_screen": (["enable", "disable"],), 77 | "string_field": ("STRING", { 78 | "multiline": False, #True if you want the field to look like the one on the ClipTextEncode node 79 | "default": "Hello World!", 80 | "lazy": True 81 | }), 82 | }, 83 | } 84 | 85 | RETURN_TYPES = ("IMAGE",) 86 | RETURN_NAMES = ("image_output_name",) 87 | 88 | FUNCTION = "test" 89 | 90 | OUTPUT_NODE = False 91 | 92 | CATEGORY = "Example" 93 | 94 | def check_lazy_status(self, image, string_field, int_field, float_field, print_to_screen): 95 | """ 96 | Return a list of input names that need to be evaluated. 97 | 98 | This function will be called if there are any lazy inputs which have not yet been 99 | evaluated. As long as you return at least one field which has not yet been evaluated 100 | (and more exist), this function will be called again once the value of the requested 101 | field is available. 102 | 103 | Any evaluated inputs will be passed as arguments to this function. Any unevaluated 104 | inputs will have the value None. 105 | """ 106 | if print_to_screen == "enable": 107 | return ["int_field", "float_field", "string_field"] 108 | else: 109 | return [] 110 | 111 | def test(self, image, string_field, int_field, float_field, print_to_screen): 112 | if print_to_screen == "enable": 113 | print(f"""Your input contains: 114 | string_field aka input text: {string_field} 115 | int_field: {int_field} 116 | float_field: {float_field} 117 | """) 118 | #do some processing on the image, in this example I just invert it 119 | image = 1.0 - image 120 | return (image,) 121 | 122 | """ 123 | The node will always be re executed if any of the inputs change but 124 | this method can be used to force the node to execute again even when the inputs don't change. 125 | You can make this node return a number or a string. This value will be compared to the one returned the last time the node was 126 | executed, if it is different the node will be executed again. 127 | This method is used in the core repo for the LoadImage node where they return the image hash as a string, if the image hash 128 | changes between executions the LoadImage node is executed again. 
129 | """ 130 | @classmethod 131 | def IS_CHANGED(s, image, string_field, int_field, float_field, print_to_screen): 132 | return "" 133 | 134 | # Set the web directory, any .js file in that directory will be loaded by the frontend as a frontend extension 135 | WEB_DIRECTORY = "./somejs" 136 | 137 | 138 | # Add custom API routes, using router 139 | from aiohttp import web 140 | from server import PromptServer 141 | 142 | @PromptServer.instance.routes.get("/hello") 143 | async def get_hello(request): 144 | return web.json_response("hello") 145 | 146 | 147 | # A dictionary that contains all nodes you want to export with their names 148 | # NOTE: names should be globally unique 149 | NODE_CLASS_MAPPINGS = { 150 | "Example": Example 151 | } 152 | 153 | # A dictionary that contains the friendly/humanly readable titles for the nodes 154 | NODE_DISPLAY_NAME_MAPPINGS = { 155 | "Example": "Example Node" 156 | } 157 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # rfcs 2 | RFCs for substantial changes to ComfyUI core, APIs, and standards. 3 | 4 | ## What is an RFC? 5 | 6 | The "RFC" (request for comments) process is intended to provide a 7 | consistent and controlled path for new features to enter the framework. 8 | 9 | Many changes, including bug fixes and documentation improvements can be 10 | implemented and reviewed via the normal GitHub pull request workflow. 11 | 12 | Some changes though are "substantial", and we ask that these be put 13 | through a bit of a design process and produce a consensus among the ComfyUI 14 | [core team] and the community. 15 | 16 | ## The RFC life-cycle 17 | 18 | An RFC goes through the following stages: 19 | 20 | - **Pending:** when the RFC is submitted as a PR (with associated discussion thread). We use Discussions instead of Pull Request comments as the former provides better discussion threading. 21 | - **Active:** when an RFC is acknowledged and undergoing implementation. The feature may be shipped as experimental during this phase. 22 | - **Landed:** when an RFC's proposed changes are shipped as stable in a release. 23 | - **Rejected:** when an RFC is officially rejected or dropped. 24 | 25 | ## When to follow this process 26 | 27 | You need to follow this process if you intend to make "substantial" 28 | changes to standards / APIs in the ComfyUI ecosystem. 29 | 30 | What constitutes a "substantial" change is evolving based on community norms, but may include the following: 31 | 32 | - A new addition to or modification of existing ComfyUI core libraries or backend APIs eg. [Model Patcher](https://github.com/comfyanonymous/ComfyUI/commit/0ee322ec5f338791c5836b79830e2f419d6fcc79). If you are writing a custom node and want something exposed by core in an API, please open an issue. 33 | - A major change to ComfyUI functionality, such as the execution engine eg. [Execution Model Inversion](https://github.com/comfyanonymous/ComfyUI/commit/5cfe38f41c7091b0fd954877d9d7427a8b438b1a) 34 | - Changes to workflow.json schema 35 | - Changes to custom node standards 36 | - Request for frontend widgets and APIs 37 | - Nodes that should be provided by core 38 | - Anything that will be relied on by developers using ComfyUI (eg. `/prompt` API) 39 | 40 | If you submit a pull request to implement a new feature without going 41 | through the RFC process, it may be closed with a polite request to 42 | submit an RFC first. 
43 | 44 | ## Why do you need to do this 45 | 46 | It is great that you are considering suggesting new features or changes to ComfyUI - we appreciate your willingness to contribute! However, as ComfyUI becomes more widely used, we need to take stability more seriously, and thus have to carefully consider the impact of every change we make that may affect end users. 47 | 48 | These constraints and tradeoffs may not be immediately obvious to users who are proposing a change just to solve a specific problem they just ran into. The RFC process serves as a way to guide you through our thought process when making changes to ComfyUI, so that we can be on the same page when discussing why or why not these changes should be made. 49 | 50 | ## Gathering feedback before submitting 51 | 52 | It's often helpful to get feedback on your concept before diving into the 53 | level of API design detail required for an RFC. **You may open an 54 | issue on this repo to start a high-level discussion**, with the goal of 55 | eventually formulating an RFC pull request with the specific implementation 56 | design. 57 | 58 | Our [Discord](https://discord.com/invite/comfyorg) is also a good place to start a discussion. 59 | 60 | ## What the process is 61 | 62 | In short, to get a major feature added to ComfyUI, one must first get the 63 | RFC merged into the RFC repo as a markdown file. At that point the RFC 64 | is 'active' and may be implemented with the goal of eventual inclusion 65 | into ComfyUI. 66 | 67 | 1. Work on your proposal in a Markdown file based on the template (`0000-template.md`) found in this repo. 68 | 69 | - Put care into the details: **RFCs that do not present convincing motivation, demonstrate understanding of the impact of the design, or are disingenuous about the drawbacks or alternatives tend to be poorly-received**. 70 | 71 | 2. Submit the Markdown file as a pull request. 72 | 73 | - Fork this repo. 74 | 75 | - Create your proposal as `rfcs/0000-my-feature.md` (where "my-feature" is descriptive). Don't assign an RFC number yet, that will be done when the PR is merged. Make sure to place the file in the `rfcs` directory. 76 | 77 | - Submit a pull request. A new thread in [Discussions](https://github.com/comfy-org/rfcs/discussions) will be made automatically. 78 | 79 | - Build consensus and integrate feedback in the discussion thread. RFCs that have broad support are much more likely to make progress than those that don't receive any comments. 80 | 81 | 3. Eventually, the [core team] will decide whether the RFC is a candidate 82 | for inclusion in ComfyUI. 83 | 84 | - An RFC can be modified based upon feedback from the [core team] and community. Significant modifications may trigger a new final comment period. 85 | 86 | - An RFC may be rejected after public discussion has settled and comments have been made summarizing the rationale for rejection. A member of the [core team] should then close the RFC's associated pull request. 87 | 88 | - An RFC may be accepted at the close of its final comment period. A [core team] member will merge the RFC's associated pull request, at which point the RFC will become 'active'. 89 | 90 | ## Details on Active RFCs 91 | 92 | Once an RFC becomes active then authors may implement it and submit the 93 | feature as a pull request to the relevant repo. 
Becoming 'active' is not a rubber stamp, and in particular still does not mean the feature will ultimately 94 | be merged; it does mean that the [core team] has agreed to it in principle 95 | and are amenable to merging it. 96 | 97 | Furthermore, the fact that a given RFC has been accepted and is 98 | 'active' implies nothing about what priority is assigned to its 99 | implementation, nor whether anybody is currently working on it. 100 | 101 | Modifications to active RFC's can be done in followup PR's. We strive 102 | to write each RFC in a manner that it will reflect the final design of 103 | the feature; but the nature of the process means that we cannot expect 104 | every merged RFC to actually reflect what the end result will be at 105 | the time of the next major release; therefore we try to keep each RFC 106 | document somewhat in sync with the language feature as planned, 107 | tracking such changes via followup pull requests to the document. 108 | 109 | ## Implementing an RFC 110 | 111 | The author of an RFC is not obligated to implement it. Of course, the 112 | RFC author (like any other developer) is welcome to post an 113 | implementation for review after the RFC has been accepted. 114 | 115 | An active RFC should have the link to the implementation PR listed if there is one. Feedback to the actual implementation should be conducted in the implementation PR instead of the original RFC PR. 116 | 117 | If you are interested in working on the implementation for an 'active' 118 | RFC, but cannot determine if someone else is already working on it, 119 | feel free to ask (e.g. by leaving a comment on the associated issue). 120 | 121 | ## Reviewing RFC's 122 | 123 | Members of the [core team] will attempt to review some set of open RFC 124 | pull requests on a regular basis. If a [core team] member believes an RFC PR is ready to be accepted into active status, they can approve the PR using GitHub's review feature to signal their approval of the RFC. 125 | 126 | **ComfyUI's RFC process owes its inspiration to the [Vue RFC process]** 127 | 128 | [vue rfc process]: https://github.com/vuejs/rfcs 129 | [core team]: https://www.comfy.org/about 130 | -------------------------------------------------------------------------------- /rfcs/0003-widget-input-socket.md: -------------------------------------------------------------------------------- 1 | # RFC: Widget Input Socket 2 | 3 | - Start Date: 2025-01-13 4 | - Target Major Version: Frontend v1.16 5 | - Implementation PRs: 6 | 7 | - https://github.com/Comfy-Org/ComfyUI_frontend/pull/3326 8 | - https://github.com/Comfy-Org/litegraph.js/pull/891 9 | 10 | - Reference Issues: 11 | 12 | - 13 | 14 | ## Summary 15 | 16 | This RFC proposes replacing ComfyUI's current widget-to-socket conversion system with a simpler, more intuitive "widget input socket" design. Instead of requiring users to manually convert widgets to sockets through context menus, widgets will automatically display an input socket when hovered over. This socket behaves like any other input socket - allowing users to drag connections to and from it. When connected, the widget becomes disabled (grayed out) to indicate it's receiving an external input. 
17 | 18 | This change aims to: 19 | 20 | - Simplify the codebase by removing complex conversion logic 21 | - Improve usability by making connections more discoverable and intuitive 22 | - Align with industry standards seen in tools like Blender 23 | - Reduce the number of clicks needed to make connections 24 | - Eliminate the need to maintain conversion states in workflow files 25 | 26 | The proposal represents a breaking change that would be implemented in [workflow schema v2.0](https://github.com/Comfy-Org/rfcs/pull/2), though existing workflows would continue to function without modification. 27 | 28 | ## Basic example 29 | 30 | ### Current conversion mechanism between widget and socket 31 | 32 | #### Convert from widget to socket 33 | 34 | To convert a widget to a socket, the user needs to first right click the node, and then select the corresponding conversion option from the context menu. 35 | 36 | ![conversion_context_menu](https://github.com/user-attachments/assets/522f163b-8aad-42b2-a899-817c1a0bae75) 37 | 38 | ![converted_socket](https://github.com/user-attachments/assets/ab6c50a3-ee33-443c-89ca-73d3a3c67042) 39 | 40 | #### Convert from socket to widget 41 | 42 | There are two ways to convert a socket to a widget: 43 | 44 | 1\. **Option1**: Right click the node, and select the corresponding conversion option from the context menu. 45 | 46 | ![conversion_context_menu](https://github.com/user-attachments/assets/4e47f740-d607-44da-b49c-4a9bea548656) 47 | 48 | 2\. **Option2**: Drag a link of correct type from an output socket on another node to the widget (Implemented in ). 49 | 50 | https://github.com/user-attachments/assets/360013b2-d350-4fb0-bbce-cb860178d9ed 51 | 52 | ### Proposed design for widget input socket 53 | 54 | When the cursor hovers over a widget, a socket will be shown on the left side of the widget. The socket will be interacted the same way as the current input socket, i.e. 55 | 56 | - user can drag a link from the socket and drop to another node's output socket to create a new link 57 | - user can drag a link from another node's output socket and drop to the socket to create a new link 58 | 59 | ![cursor_hover_on_widget](https://github.com/user-attachments/assets/953867dc-f27c-47de-a06f-aa94a29350a4) 60 | 61 | When grabbing a link, the widget with a matching type will be highlighted. Here in the example, the link type is `INT`, 62 | so the widget `seed` and `steps` are highlighted. 63 | 64 | ![grab_link](https://github.com/user-attachments/assets/9f44ff15-37ae-4fda-af77-24048098582c) 65 | 66 | When connected, the widget will be disabled (grayed out) and the socket will be highlighted. 67 | 68 | ![connected_widget_socket](https://github.com/user-attachments/assets/fe17d9b2-01c6-441a-adc6-f869f7aa3cbf) 69 | 70 | ## Motivation 71 | 72 | 1. **Simplified State Management** 73 | 74 | - Current implementation requires complex state tracking for widget/socket conversion status 75 | - Eliminates need to persist conversion state in workflow files 76 | - Removes the `default_input` configuration complexity from node definitions 77 | - Reduces potential for bugs related to state synchronization 78 | 79 | 2. **Improved Discoverability** 80 | 81 | - New users often struggle to discover the widget-to-socket conversion feature 82 | - Hover-based socket visibility provides immediate visual feedback 83 | - Makes connection capabilities self-evident without requiring documentation 84 | - Follows established UI patterns where hover reveals additional functionality 85 | 86 | 3. 
**Industry Standard Alignment** 87 | 88 | - Matches behavior in popular node-based tools like Blender 89 | - Reduces learning curve for users coming from other platforms 90 | - Leverages existing mental models from the visual programming community 91 | - Makes ComfyUI feel more familiar to experienced node-based workflow users 92 | 93 | 4. **Cognitive Simplification** 94 | 95 | - Eliminates the artificial distinction between widgets and sockets 96 | - Treats all inputs as potentially connectable by default 97 | - Removes need to teach users about "conversion" as a concept 98 | - Provides a more intuitive "what you see is what you can do" interface 99 | 100 | 5. **Workflow Optimization** 101 | 102 | - Reduces actions needed to create connections from 3+ clicks to 1 drag 103 | - Eliminates context menu navigation time 104 | - Speeds up workflow creation and modification 105 | - Particularly beneficial for complex workflows with many connections 106 | 107 | 6. **Enhanced Accessibility** 108 | 109 | - Reduces fine motor control requirements compared to context menu usage 110 | - Provides larger hit areas for connection interactions 111 | - More forgiving of slight cursor movement during interaction 112 | - Supports users with various input devices more effectively 113 | 114 | 7. **Technical Benefits** 115 | 116 | - Simplifies the codebase by removing conversion logic 117 | - Makes widget behavior more predictable and easier to test 118 | - Reduces potential edge cases in the connection system 119 | - Easier to maintain and extend in the future 120 | 121 | The primary goal is to make ComfyUI more intuitive and efficient to use while reducing implementation complexity. This change would bring the interface more in line with user expectations and industry standards, while simultaneously simplifying the codebase. 122 | 123 | ## Detailed design 124 | 125 | ### Component Updates 126 | 127 | #### LGraphCanvas 128 | 129 | 1. **Widget Socket Rendering** 130 | 131 | - Modify `drawNodeWidgets()` to render an input socket for a widget when: 132 | 133 | - The widget is being hovered 134 | - The socket has an active connection 135 | - A compatible link is being dragged 136 | 137 | - Update `drawNode()` to skip rendering duplicate sockets for widget inputs 138 | 139 | 2. **Interaction Handling** 140 | 141 | - Extend `isOverNodeInput()` to detect cursor position over widget input sockets 142 | - Return `true` when cursor is within the socket's hit area 143 | 144 | #### LGraphWidget 145 | 146 | 1. **Disabled State Management** 147 | 148 | - Add `isDisabled` getter property 149 | 150 | - Returns `true` when the widget has a connected input socket 151 | 152 | - Used to control widget interactivity and visual state 153 | 154 | 2. **Visual Styling** 155 | 156 | - Apply 0.5 opacity to widgets in disabled state 157 | - Maintain visual consistency with standard disabled UI elements 158 | 159 | ### Data Structure Changes 160 | 161 | #### LGraphNode 162 | 163 | 1. **Input Management** 164 | 165 | - Extend `inputs` array to include widget input sockets 166 | - Add widget reference to each input socket object: 167 | 168 | ```typescript 169 | interface InputSocket { 170 | widget?: LGraphWidget; 171 | // ... existing input socket properties 172 | } 173 | ``` 174 | 175 | 2. 
**Serialization** 176 | 177 | - Maintain compatibility with existing serialization format 178 | 179 | - Reference: [RFC #2](https://github.com/Comfy-Org/rfcs/pull/2) 180 | 181 | - No changes required to current workflow file structure 182 | 183 | ## Drawbacks 184 | 185 | 1. **Implementation Challenges** 186 | 187 | - Need to modify core rendering logic in LGraphCanvas 188 | - Potential edge cases with complex layouts for custom DOM widgets 189 | - Additional complexity in handling hover states during link dragging 190 | - Need to maintain backward compatibility with existing workflows 191 | 192 | 2. **User Experience Trade-offs** 193 | 194 | - Loss of explicit user control over widget/socket conversion 195 | - Hover-based interactions may be less reliable on touch devices 196 | - May be less discoverable than context menu options for some users 197 | 198 | 3. **Migration Concerns** 199 | 200 | - Existing tutorials and documentation will need updates 201 | - Users familiar with the current system will need to adapt 202 | - Custom nodes using the current widget conversion system may need modifications 203 | 204 | ## Adoption strategy 205 | 206 | The transition to the new widget input socket design will be implemented in a single phase: 207 | 208 | ### Implementation Phase (v2.0) 209 | 210 | 1. **Breaking Changes** 211 | 212 | - Replace the existing conversion system with the new widget input socket design 213 | - Remove all conversion-related APIs and context menu options 214 | - Remove `force_input` configurations from node definitions 215 | - Clean up legacy conversion code from the codebase 216 | 217 | 2. **Documentation & Communication** 218 | 219 | - Update official documentation with the new interaction model 220 | - Provide migration guides for node developers 221 | - Create visual tutorials demonstrating the new connection workflow 222 | - Issue clear communication about the breaking changes 223 | 224 | 3. **Ecosystem Impact** 225 | 226 | - Custom node developers will need to: 227 | 228 | - Remove any conversion-specific code 229 | - Update widget definitions to work with new socket system 230 | - Test existing nodes with the new connection behavior 231 | 232 | 4. **User Impact** 233 | 234 | - All existing workflows will continue to work as expected 235 | - Users will immediately benefit from the simplified interaction model 236 | - No manual migration steps required for end users 237 | 238 | This direct approach allows us to quickly realize the benefits of the new design while minimizing the complexity of maintaining two parallel systems. Since the new design is more intuitive and requires less user education, the transition cost is justified by the immediate improvements in usability. 239 | 240 | ## Unresolved questions 241 | -------------------------------------------------------------------------------- /rfcs/0006-trainer.md: -------------------------------------------------------------------------------- 1 | 2 | # RFC: ComfyUI Training Modules 3 | 4 | - Start Date: 2025-03-01 5 | - Target Major Version: TBD 6 | 7 | ## Summary 8 | 9 | This RFC proposes the addition of training capabilities to ComfyUI, enabling users to create and fine-tune LoRA (Low-Rank Adaptation) models directly through the ComfyUI interface. The proposal includes a set of node implementations for loading image datasets, training LoRAs, visualizing training progress, and saving trained models. 10 | 11 | ## Basic example 12 | 13 | The basic workflow would allow users to: 14 | 15 | 1. 
Load an image dataset: 16 | ![image](https://github.com/user-attachments/assets/3e00d09c-14ea-432d-a694-270ab13367ec) 17 | 18 | 19 | 2. Train a LoRA on these images: 20 | ![image](https://github.com/user-attachments/assets/e631e59a-9944-4fc2-b6dd-13e0f0c132f9) 21 | 22 | 3. Save the resulting LoRA: 23 | ![image](https://github.com/user-attachments/assets/dbcbf1e4-af13-4095-86e9-7c4eada23432) 24 | 25 | 26 | 4. Visualize training loss: 27 | ![image](https://github.com/user-attachments/assets/d036b420-ef6c-4d2e-af55-d25c11724623) 28 | 29 | 30 | ## Motivation 31 | 32 | Currently, users who want to create custom LoRA models need to: 33 | 34 | 1. Use external tools and scripts for training, which often requires command-line expertise 35 | 2. Set up specialized environments for training 36 | 3. Manually move the trained models between systems 37 | 38 | Adding training capabilities directly to ComfyUI would: 39 | 40 | 1. **Simplify the training workflow**: Users can train models in the same interface where they use them 41 | 2. **Increase accessibility**: Users without programming experience can customize models 42 | 3. **Enable rapid iteration**: The ability to train and immediately test models in the same interface 43 | 4. **Provide visual feedback**: Real-time visualization of the training process 44 | 5. **Maintain workflow continuity**: The entire model creation, training, and inference pipeline can be represented as a unified workflow 45 | 46 | ## Detailed design 47 | 48 | The implementation consists of four main components: 49 | 50 | ### 1. Image Dataset Loading 51 | 52 | Two nodes are proposed for loading image datasets: 53 | 54 | - `LoadImageSetNode`: Loads individual images selected by the user 55 | - `LoadImageSetFromFolderNode`: Loads all images from a specified folder 56 | 57 | These nodes offer options for handling images of different sizes (stretch, crop, pad) and prepare the images for training. 58 | 59 | ```python 60 | class LoadImageSetFromFolderNode: 61 | @classmethod 62 | def INPUT_TYPES(s): 63 | return { 64 | "required": { 65 | "folder": (folder_paths.get_input_subfolders(), {"tooltip": "The folder to load images from."}) 66 | }, 67 | "optional": { 68 | "resize_method": ( 69 | ["None", "Stretch", "Crop", "Pad"], 70 | {"default": "None"}, 71 | ), 72 | } 73 | } 74 | 75 | RETURN_TYPES = ("IMAGE",) 76 | FUNCTION = "load_images" 77 | CATEGORY = "loaders" 78 | EXPERIMENTAL = True 79 | DESCRIPTION = "Loads a batch of images from a directory for training." 80 | ``` 81 | 82 | ### 2. 
LoRA Training Node 83 | 84 | The `TrainLoraNode` is the core component that handles the training process: 85 | 86 | ```python 87 | class TrainLoraNode: 88 | @classmethod 89 | def INPUT_TYPES(s): 90 | return { 91 | "required": { 92 | "model": (IO.MODEL, {"tooltip": "The model to train the LoRA on."}), 93 | "vae": (IO.VAE, {"tooltip": "The VAE model to use for encoding images for training."}), 94 | "positive": (IO.CONDITIONING, {"tooltip": "The positive conditioning to use for training."}), 95 | "image": (IO.IMAGE, {"tooltip": "The image or image batch to train the LoRA on."}), 96 | "batch_size": (IO.INT, {"default": 1, "min": 1, "max": 10000, "step": 1}), 97 | "steps": (IO.INT, {"default": 50, "min": 1, "max": 1000}), 98 | "learning_rate": (IO.FLOAT, {"default": 0.0003, "min": 0.0000001, "max": 1.0, "step": 0.00001}), 99 | "rank": (IO.INT, {"default": 8, "min": 1, "max": 128}), 100 | "optimizer": (["Adam", "AdamW", "SGD", "RMSprop"], {"default": "Adam"}), 101 | "loss_function": (["MSE", "L1", "Huber", "SmoothL1"], {"default": "MSE"}), 102 | "seed": (IO.INT, {"default": 0, "min": 0, "max": 0xFFFFFFFFFFFFFFFF}), 103 | "training_dtype": (["bf16", "fp32"], {"default": "bf16"}), 104 | "existing_lora": (folder_paths.get_filename_list("loras") + ["[None]"], {"default": "[None]"}), 105 | }, 106 | } 107 | 108 | RETURN_TYPES = (IO.MODEL, IO.LORA_MODEL, IO.LOSS_MAP, IO.INT) 109 | RETURN_NAMES = ("model_with_lora", "lora", "loss", "steps") 110 | FUNCTION = "train" 111 | CATEGORY = "training" 112 | EXPERIMENTAL = True 113 | ``` 114 | 115 | The training process: 116 | 1. Takes a batch of images and encodes them using a VAE 117 | 2. Sets up LoRA layers for all eligible weights in the model 118 | 3. Configures an optimizer and loss function based on user selections 119 | 4. Performs gradient-based training for the specified number of steps 120 | 5. Returns the model with LoRA applied, the LoRA weights, a map of training losses, and the total training steps 121 | 122 | ### 3. Model Saving Node 123 | 124 | The `SaveLoRA` node enables users to save their trained LoRA models: 125 | 126 | ```python 127 | class SaveLoRA: 128 | @classmethod 129 | def INPUT_TYPES(s): 130 | return { 131 | "required": { 132 | "lora": (IO.LORA_MODEL, {"tooltip": "The LoRA model to save."}), 133 | "prefix": (IO.STRING, {"default": "trained_lora"}), 134 | }, 135 | "optional": { 136 | "steps": (IO.INT, {"forceInput": True}), 137 | }, 138 | } 139 | 140 | RETURN_TYPES = () 141 | FUNCTION = "save" 142 | CATEGORY = "loaders" 143 | EXPERIMENTAL = True 144 | OUTPUT_NODE = True 145 | ``` 146 | 147 | The node saves the LoRA weights in SafeTensors format, with a filename that includes the number of training steps and a timestamp. 148 | 149 | ### 4. Training Visualization Node 150 | 151 | The `LossGraphNode` visualizes the training progress: 152 | 153 | ```python 154 | class LossGraphNode: 155 | @classmethod 156 | def INPUT_TYPES(s): 157 | return { 158 | "required": { 159 | "loss": (IO.LOSS_MAP, {"default": {}}), 160 | "filename_prefix": (IO.STRING, {"default": "loss_graph"}), 161 | }, 162 | } 163 | 164 | RETURN_TYPES = () 165 | FUNCTION = "plot_loss" 166 | OUTPUT_NODE = True 167 | CATEGORY = "training" 168 | EXPERIMENTAL = True 169 | DESCRIPTION = "Plots the loss graph and saves it to the output directory." 170 | ``` 171 | 172 | This node generates a graph showing the training loss over time, providing visual feedback on the training process. 
173 | 174 | ### Supporting Components 175 | 176 | The implementation also includes several support classes: 177 | 178 | 1. `TrainSampler`: A custom sampler that performs gradient updates during the sampling process 179 | 2. `LoraDiff` and `BiasDiff`: Weight wrapper classes that apply LoRA adaptations to model weights 180 | 181 | ## Drawbacks 182 | 183 | 1. **Resource Consumption**: Training is computationally intensive and may strain systems with limited resources 184 | 2. **UI Responsiveness**: Long training processes could make the ComfyUI interface less responsive 185 | 3. **Complexity**: Adding training capabilities increases the complexity of the ComfyUI codebase 186 | 4. **Learning Curve**: Users may need to understand more ML concepts to effectively use the training features 187 | 188 | ## Adoption strategy 189 | 190 | 1. **Experimental Flag**: Initially release nodes with the `EXPERIMENTAL = True` flag to indicate the developing nature of the feature 191 | 2. **Documentation**: Provide comprehensive documentation and tutorial workflows 192 | 3. **Gradual Feature Addition**: Start with basic LoRA training and expand to other training types based on user feedback 193 | 4. **Default Parameters**: Set sensible defaults to help users get started without deep ML knowledge 194 | 195 | ## Unresolved questions 196 | 197 | 1. **Memory Management**: How will the system handle memory during training, especially for large models and datasets? 198 | 2. **Checkpoint Frequency**: Should the system automatically save checkpoints during training to prevent loss of progress? 199 | 3. **Training Interruption**: How should the system handle interrupted training sessions? 200 | 4. **Hyperparameter Optimization**: Should the system provide tools for automatically finding optimal hyperparameters? 201 | 5. **Multi-GPU Support**: How will training utilize multiple GPUs if available? 202 | 6. **Integration with Existing Workflows**: How can trained models be seamlessly integrated into existing inference workflows? 203 | 7. **Performance Metrics**: Should additional metrics beyond loss be tracked and visualized? 204 | 8. **Dataset Preparation**: Should the system provide more tools for dataset curation and augmentation? 205 | 206 | ## Implementation Plan 207 | 208 | ### Phase 1: Basic LoRA Training 209 | 210 | Initial implementation of the nodes described in this RFC. 211 | 212 | ### Phase 2: Enhanced Features 213 | 214 | - Checkpoint saving during training 215 | - More advanced training visualizations 216 | - Support for additional training techniques (e.g., DreamBooth, Control model training like Control LoRA and IPA) 217 | 218 | ### Phase 3: Workflow Integration 219 | 220 | - Templates for common training scenarios 221 | - Integration with model merging and inference workflows 222 | - Advanced dataset management tools 223 | 224 | ### Phase 4: Model Format 225 | 226 | - New model format to improve model memory management and metadata of models in ComfyUI 227 | 228 | ## Links 229 | 230 | 234 | 235 | - [Full Rendered Proposal]() 236 | 237 | - [Discussion Thread]() 238 | 239 | 242 | 243 | --- 244 | 245 | **Important: Do NOT comment on this PR. Please use the discussion thread linked above to provide feedback, as it provides branched discussions that are easier to follow. 
This also makes the edit history of the PR clearer.** 246 | -------------------------------------------------------------------------------- /rfcs/0004-widget-values-format.md: -------------------------------------------------------------------------------- 1 | # RFC: Workflow JSON Widget Values Format 2 | 3 | - Start Date: 2025-01-08 4 | - Target Major Version: TBD 5 | 6 | ## Summary 7 | 8 | This RFC proposes a new format for handling widget values in ComfyUI workflows by integrating them directly into the node inputs array instead of storing them in a separate `widgets_values` array. The new format improves type safety, maintainability, and self-documentation of workflows by making each widget value a named, typed input with explicit metadata. This change will require a version bump in the workflow schema from 1.0 to 2.0, but includes backward compatibility measures to ensure a smooth transition for existing workflows and custom nodes. 9 | 10 | ## Basic example 11 | 12 | ![image](https://github.com/user-attachments/assets/e36113a9-20d6-406a-9a83-209c86b91107) 13 | 14 | Current format node serialization format: 15 | 16 | ```json 17 | { 18 | "id": 3, 19 | "type": "KSampler", 20 | "pos": [863, 186], 21 | "size": [315, 262], 22 | "flags": {}, 23 | "order": 4, 24 | "mode": 0, 25 | "inputs": [ 26 | { "name": "model", "type": "MODEL", "link": 1 }, 27 | { "name": "positive", "type": "CONDITIONING", "link": 4 }, 28 | { "name": "negative", "type": "CONDITIONING", "link": 6 }, 29 | { "name": "latent_image", "type": "LATENT", "link": 2 }, 30 | { 31 | "name": "seed", 32 | "type": "INT", 33 | "link": null, 34 | "widget": { 35 | "name": "seed" 36 | } 37 | } 38 | ], 39 | "outputs": [{ "name": "LATENT", "type": "LATENT", "links": [7], "slot_index": 0 }], 40 | "properties": {}, 41 | "widgets_values": [156680208700286, true, 20, 8, "euler", "normal", 1] 42 | } 43 | ``` 44 | 45 | Proposed format: 46 | 47 | ```json 48 | { 49 | "id": 3, 50 | "type": "KSampler", 51 | "pos": [863, 186], 52 | "size": [315, 262], 53 | "flags": {}, 54 | "order": 4, 55 | "mode": 0, 56 | "inputs": [ 57 | { "name": "model", "type": "MODEL", "link": 1 }, 58 | { "name": "positive", "type": "CONDITIONING", "link": 4 }, 59 | { "name": "negative", "type": "CONDITIONING", "link": 6 }, 60 | { "name": "latent_image", "type": "LATENT", "link": 2 }, 61 | { "name": "seed", "type": "INT", "value": 156680208700286, "link": null }, 62 | { "name": "denoise", "type": "FLOAT", "value": 1.0 }, 63 | { "name": "steps", "type": "INT", "value": 20 }, 64 | { "name": "cfg", "type": "FLOAT", "value": 8 }, 65 | { "name": "sampler_name", "type": "COMBO", "value": "euler" }, 66 | { "name": "scheduler", "type": "COMBO", "value": "normal" }, 67 | { "name": "denoise", "type": "FLOAT", "value": 1.0 }, 68 | ], 69 | "outputs": [{ "name": "LATENT", "type": "LATENT", "links": [7], "slot_index": 0 }], 70 | "properties": {}, 71 | } 72 | ``` 73 | 74 | In the new format, the `link` field determines the input's behavior and UI representation: 75 | 76 | - `link: undefined` - Rendered as a widget (e.g., text input, slider, dropdown) 77 | 78 | ```json 79 | { "name": "steps", "type": "INT", "value": 20 } 80 | ``` 81 | 82 | - `link: null` - Rendered as an unconnected input socket 83 | 84 | ```json 85 | { "name": "seed", "type": "INT", "value": 156680208700286, "link": null } 86 | ``` 87 | 88 | - `link: number` - Rendered as a connected input socket with the specified link ID 89 | 90 | ```json 91 | { "name": "model", "type": "MODEL", "link": 1 } 92 | ``` 93 | 94 | This 
distinction allows nodes to clearly indicate whether an input should be displayed as an interactive widget or as a connection point, while maintaining the ability to store default values for unconnected inputs. 95 | 96 | ## Motivation 97 | 98 | The proposed format change addresses several key limitations in the current widget values implementation: 99 | 100 | 1. **Unified Input Handling**: By moving widget values into the inputs array, we create a single, consistent way to handle all node inputs. This simplifies the node processing logic and reduces the need for special-case handling of widget values versus connected inputs. 101 | 102 | 2. **Self-Describing Nodes**: The new format makes nodes more self-documenting by including input names and types directly in the node definition. This allows: 103 | 104 | - Reconstruction of node displays without requiring access to the full node definition 105 | - Better error checking and validation of values 106 | - Improved debugging capabilities 107 | - Easier serialization/deserialization of workflows 108 | 109 | 3. **Flexible Parameter Management**: The array-based structure of the current format makes it difficult to: 110 | 111 | - Insert new parameters in the middle of the list 112 | - Remove deprecated parameters 113 | - Maintain backward compatibility The named input approach solves these issues by making parameter order irrelevant. 114 | 115 | 4. **Type Safety**: Explicit type definitions for each input value helps prevent type-related errors and makes it easier to implement proper validation at both runtime and development time. 116 | 117 | ## Detailed design 118 | 119 | ### Schema Changes 120 | 121 | The workflow schema will be updated from version 1.0 to 2.0 to accommodate the new widget values format. 
Here's the proposed Zod schema changes: 122 | 123 | ```typescript 124 | // Version 2.0 Node Input Schema 125 | const NodeInputV2 = z.object({ 126 | name: z.string(), 127 | type: z.string(), 128 | link: z.number().optional(), 129 | value: z.any().optional(), // For widget values 130 | }); 131 | 132 | // Version 2.0 Node Schema 133 | const NodeV2 = z.object({ 134 | id: z.number(), 135 | type: z.string(), 136 | pos: z.tuple([z.number(), z.number()]), 137 | size: z.tuple([z.number(), z.number()]), 138 | flags: z.record(z.any()), 139 | order: z.number(), 140 | mode: z.number(), 141 | inputs: z.array(NodeInputV2), 142 | outputs: z.array(NodeOutput), 143 | properties: z.record(z.any()), 144 | // widgets_values field removed 145 | }); 146 | ``` 147 | 148 | ### Version Conversion 149 | 150 | To maintain backward compatibility, the system will include bidirectional converters between versions: 151 | 152 | ```typescript 153 | function convertTo2(nodeV1: NodeV1): NodeV2 { 154 | const widgetDefinitions = getNodeWidgetDefinitions(nodeV1.type); 155 | 156 | // Convert widget values to input format 157 | const widgetInputs = widgetDefinitions.map((def, index) => ({ 158 | name: def.name, 159 | type: def.type, 160 | value: nodeV1.widgets_values[index] 161 | })); 162 | 163 | return { 164 | ...nodeV1, 165 | inputs: [...nodeV1.inputs, ...widgetInputs], 166 | widgets_values: undefined // Remove widget_values field 167 | }; 168 | } 169 | 170 | function convertTo1(nodeV2: NodeV2): NodeV1 { 171 | const widgetDefinitions = getNodeWidgetDefinitions(nodeV2.type); 172 | const regularInputs = nodeV2.inputs.filter(input => !widgetDefinitions.find(def => def.name === input.name)); 173 | const widgetValues = widgetDefinitions.map(def => { 174 | const input = nodeV2.inputs.find(i => i.name === def.name); 175 | return input?.value; 176 | }); 177 | 178 | return { 179 | ...nodeV2, 180 | inputs: regularInputs, 181 | widgets_values: widgetValues 182 | }; 183 | } 184 | ``` 185 | 186 | ### Workflow Export Options 187 | 188 | When exporting workflows, users will be presented with schema version choices: 189 | 190 | 1. Latest Version (2.0) - Default 191 | 2. Legacy Version (1.0) - For compatibility with older systems (Beta Reroute) 192 | 3. Legacy Version (0.4) - For compatibility with older systems 193 | 194 | The export dialog will include version selection and automatically convert the workflow to the selected format. 195 | 196 | ### Unknown Node Handling 197 | 198 | For nodes without available definitions, LiteGraph will use the input metadata to render a basic representation: 199 | 200 | 1. Regular inputs will be rendered as connection points 201 | 2. Widget inputs will be rendered as appropriate UI controls based on their type: 202 | 203 | - INT/FLOAT -> Number input 204 | - STRING -> Text input 205 | - COMBO -> Dropdown (with available values if provided) 206 | - BOOLEAN -> Checkbox 207 | 208 | ```typescript 209 | class LiteGraphNode { 210 | // ... existing code ... 
211 | 212 | renderUnknownNode() { 213 | this.inputs.forEach(input => { 214 | if (input.link !== undefined) { 215 | // Render connection point 216 | this.addInput(input.name, input.type); 217 | } else if (input.value !== undefined) { 218 | // Render widget based on type 219 | this.addWidget( 220 | this.getWidgetTypeFromInputType(input.type), 221 | input.name, 222 | input.value, 223 | (v) => { input.value = v; } 224 | ); 225 | } 226 | }); 227 | } 228 | } 229 | ``` 230 | 231 | This approach ensures that even unknown nodes remain editable and maintain their configuration, even if specialized behavior isn't available. 232 | 233 | ## Drawbacks 234 | 235 | 1. **Increased Storage Size**: The new format is more verbose due to including field names and types for each widget value, which will increase the size of saved workflow files. While modern storage and network capabilities make this a minor concern, it's worth noting for systems handling large numbers of workflows. 236 | 237 | 2. **Migration Complexity**: Existing workflows will need to be migrated to the new format, requiring: 238 | 239 | - Development of reliable conversion utilities 240 | - Testing of conversion edge cases 241 | - Potential maintenance of multiple format versions during transition 242 | - Additional documentation for handling legacy formats 243 | 244 | 3. **Performance Considerations**: The new format requires: 245 | 246 | - More complex parsing logic compared to simple array access 247 | - Additional memory usage due to storing metadata with each value 248 | - Potentially slower lookup times when accessing widget values (object property access vs array index) 249 | 250 | 4. **Backward Compatibility Challenges**: While the proposal includes conversion utilities, there may be: 251 | 252 | - Third-party tools that need updating 253 | - Custom node implementations that assume the array-based format 254 | - Existing scripts or automation that parse workflow files directly 255 | 256 | 5. **Learning Curve**: Users and developers familiar with the current array-based format will need to adapt to the new structure, potentially leading to initial confusion and requiring updates to documentation and tutorials. 257 | 258 | Despite these drawbacks, the benefits of improved maintainability, type safety, and self-documentation likely outweigh these concerns in the long term. 259 | 260 | ## Adoption strategy 261 | 262 | The transition to the new widget values format will be implemented through a phased approach to ensure smooth adoption: 263 | 264 | 1. **Version Support** 265 | 266 | - ComfyUI will support both 1.0 and 2.0 formats simultaneously during the transition period 267 | - The internal format will be 2.0, with automatic conversion happening at workflow load/save 268 | - All new features will target the 2.0 format 269 | 270 | 2. **Breaking Changes** 271 | 272 | - This is technically a breaking change, but will be implemented with backward compatibility 273 | - Existing workflows using the 1.0 format will continue to work through automatic conversion 274 | - Node developers will need to update their implementations to support the new format 275 | 276 | 3. 
**Migration Path** 277 | 278 | - For ComfyUI Users: 279 | 280 | - Existing workflows will be automatically converted when loaded 281 | - No manual intervention required 282 | - Option to export in legacy format for compatibility with older systems 283 | 284 | - For Node Developers: 285 | 286 | - Deprecation notices for direct `widgets_values` access 287 | - New helper functions for accessing widget values through the inputs array 288 | - Migration guide and examples provided in documentation 289 | - Grace period of 2-3 releases before removing `widgets_values` support 290 | 291 | 4. **Ecosystem Impact** 292 | 293 | - Code search shows only ~10 custom node repositories directly accessing `widget_values` 294 | - ComfyUI team can directly contribute fixes to these repositories 295 | - API clients and workflow manipulation tools will need modification 296 | - Web UI extensions may require updates for the new format 297 | - Compatibility layer will be provided during transition: 298 | 299 | ```javascript 300 | get widgets_values() { 301 | console.warn("Deprecated: accessing widgets_values directly. Please migrate to input values."); 302 | return this.inputs 303 | .filter(input => input.value !== undefined) 304 | .map(input => input.value); 305 | } 306 | ``` 307 | 308 | 5. **Timeline** 309 | 310 | - Beta release with dual format support 311 | - 3-month transition period with both formats supported 312 | - Full migration to 2.0 format in next major version 313 | 314 | ## Unresolved questions 315 | 316 | TBD 317 | -------------------------------------------------------------------------------- /rfcs/0001-object_info_v2.md: -------------------------------------------------------------------------------- 1 | # RFC: ComfyUI API Improvements 2 | 3 | - Start Date: 2025-02-03 4 | - Target Major Version: TBD 5 | 6 | ## Summary 7 | 8 | This RFC proposes three key improvements to the ComfyUI API: 9 | 10 | 1. Lazy loading for COMBO input options to reduce initial payload size 11 | 2. Restructuring node output specifications for better maintainability 12 | 3. Explicit COMBO type definition for clearer client-side handling 13 | 14 | ## Basic example 15 | 16 | ### 1\. Lazy Loading COMBO Options 17 | 18 | ```python 19 | # Before 20 | class CheckpointLoader: 21 | @classmethod 22 | def INPUT_TYPES(s): 23 | return { 24 | "required": { 25 | "config_name": (folder_paths.get_filename_list("configs"),), 26 | } 27 | } 28 | 29 | # After 30 | class CheckpointLoader: 31 | @classmethod 32 | def INPUT_TYPES(s): 33 | return { 34 | "required": { 35 | "config_name": ("COMBO", { 36 | "type" : "remote", 37 | "route": "/internal/files", 38 | "response_key" : "files", 39 | "query_params" : { 40 | "folder_path" : "configs" 41 | } 42 | }), 43 | } 44 | } 45 | ``` 46 | 47 | ### 2\. Improved Output Specification 48 | 49 | ```python 50 | # Before 51 | RETURN_TYPES = ("CONDITIONING","CONDITIONING") 52 | RETURN_NAMES = ("positive", "negative") 53 | OUTPUT_IS_LIST = (False, False) 54 | OUTPUT_TOOLTIPS = ("positive-tooltip", "negative-tooltip") 55 | 56 | # After 57 | RETURNS = ( 58 | {"type": "CONDITIONING", "name": "positive", "is_list": False, "tooltip": "positive-tooltip"}, 59 | {"type": "CONDITIONING", "name": "negative", "is_list": False, "tooltip": "negative-tooltip"}, 60 | ) 61 | ``` 62 | 63 | ### 3\. Explicit COMBO Type 64 | 65 | ```python 66 | # Before 67 | "combo input": [[1, 2, 3], { default: 2 }] 68 | 69 | # After 70 | "combo input": ["COMBO", { options: [1, 2, 3], default: 2}] 71 | ``` 72 | 73 | ## Motivation 74 | 75 | 1. 
**Full recompute**: If the user wants to refresh the COMBO options for a single folder, they need to recompute the entire node definitions. This is a very slow process and not user friendly. 76 | 77 | 2. **Large Payload Issue**: The `/object_info` API currently returns several MB of JSON data, primarily due to eager loading of COMBO options. This impacts initial load times and overall performance. 78 | 79 | 3. **Output Specification Maintenance**: The current format for defining node outputs requires modifications in multiple lists, making it error-prone and difficult to maintain. Adding new features like tooltips would further complicate this. 80 | 81 | 4. **Implicit COMBO Type**: The current implementation requires client-side code to infer COMBO types by checking if the first parameter is a list, which is not intuitive and could lead to maintenance issues. 82 | 83 | ## Detailed design 84 | 85 | The implementation will be split into two phases to minimize disruption: 86 | 87 | ### Phase 1: Combo Specification Changes 88 | 89 | #### 1.1 New Combo Specification 90 | 91 | Input types will be explicitly defined using tuples with configuration objects. A variant of the `COMBO` type will be added to support lazy loading options from the server. 92 | 93 | ```python 94 | @classmethod 95 | def INPUT_TYPES(s): 96 | return { 97 | "required": { 98 | # Remote combo 99 | "ckpt_name": ("COMBO", { 100 | "type": "remote", 101 | "route": "/internal/files", 102 | "response_key": "files", 103 | "refresh": 0, # TTL in ms. 0 = do not refresh after initial load. 104 | "query_params": { 105 | "folder_path": "checkpoints", 106 | "filter_ext": [".ckpt", ".safetensors"] 107 | } 108 | }), 109 | "mode": ("COMBO", { 110 | "options": ["balanced", "speed", "quality"], 111 | "default": "balanced", 112 | "tooltip": "Processing mode" 113 | }) 114 | } 115 | } 116 | ``` 117 | 118 | Use a Proxy on remote combo widgets' values property that doesn't compute/fetch until first access. 119 | 120 | ```typescript 121 | COMBO(node, inputName, inputData: InputSpec, app, widgetName) { 122 | 123 | // ... 124 | 125 | const res = { 126 | widget: node.addWidget('combo', inputName, defaultValue, () => {}, { 127 | // Support old and new combo input specs 128 | values: widgetStore.isComboInputV2(inputData) 129 | ? 
inputData[1].options 130 | : inputType 131 | }) 132 | } 133 | 134 | if (type === 'remote') { 135 | const remoteWidget = useRemoteWidget(inputData) 136 | 137 | const origOptions = res.widget.options 138 | res.widget.options = new Proxy(origOptions, { 139 | // Defer fetching until first access (node added to graph) 140 | get(target, prop: string | symbol) { 141 | if (prop !== 'values') return target[prop] 142 | 143 | // Start non-blocking fetch 144 | remoteWidget.fetchOptions().then((data) => {}) 145 | 146 | const current = remoteWidget.getCacheEntry() 147 | return current?.data || widgetStore.getDefaultValue(inputData) 148 | } 149 | }) 150 | } 151 | ``` 152 | 153 | Backoff time will be determined by the number of failed attempts: 154 | 155 | ```typescript 156 | // Exponential backoff with max of 10 seconds 157 | const backoff = Math.min(1000 * 2 ** (failedAttempts - 1), 10000); 158 | 159 | // Example backoff times: 160 | // Attempt 1: 1000ms (1s) 161 | // Attempt 2: 2000ms (2s) 162 | // Attempt 3: 4000ms (4s) 163 | // Attempt 4: 8000ms (8s) 164 | // Attempt 5+: 10000ms (10s) 165 | ``` 166 | 167 | Share computation results between widgets using a key based on the route and query params: 168 | 169 | ```typescript 170 | // Global cache for memoizing fetches 171 | const dataCache = new Map>(); 172 | 173 | function getCacheKey(options: RemoteWidgetOptions): string { 174 | return JSON.stringify({ route: options.route, params: options.query_params }); 175 | } 176 | ``` 177 | 178 | The cache can be invalidated in two ways: 179 | 180 | 1. **TTL-based**: Using the `refresh` parameter to specify a time-to-live in milliseconds. When TTL expires, next access triggers a new fetch. 181 | 2. **Manual**: Using the `forceUpdate` method of the widget, which deletes the cache entry and triggers a new fetch on next access. 182 | 183 | Example TTL usage: 184 | 185 | ```python 186 | "ckpt_name": ("COMBO", { 187 | "type": "remote", 188 | "refresh": 60000, # Refresh every minute 189 | // ... other options 190 | }) 191 | ``` 192 | 193 | #### 1.2 New Endpoints 194 | 195 | ```python 196 | @routes.get("/internal/files/{folder_name}") 197 | async def list_folder_files(request): 198 | folder_name = request.match_info["folder_name"] 199 | filter_ext = request.query.get("filter_ext", "").split(",") 200 | filter_content_type = request.query.get("filter_content_type", "").split(",") 201 | 202 | files = folder_paths.get_filename_list(folder_name) 203 | if filter_ext and filter_ext[0]: 204 | files = [f for f in files if any(f.endswith(ext) for ext in filter_ext)] 205 | if filter_content_type and filter_content_type[0]: 206 | files = folder_paths.filter_files_content_type(files, filter_content_type) 207 | 208 | return web.json_response({ 209 | "files": files, 210 | }) 211 | ``` 212 | 213 | #### 1.3 Gradual Change with Nodes 214 | 215 | Nodes will be updated incrementally to use the new combo specification. 
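The backoff, cache-key, and TTL snippets above describe the pieces separately. Pulled together, a minimal sketch of the deferred fetch could look like the following — the names (`fetchOptions`, `forceUpdate`, the `CacheEntry` shape) and the exact control flow are illustrative assumptions, not the actual `useRemoteWidget` implementation:

```typescript
interface RemoteWidgetOptions {
  route: string
  response_key: string
  refresh?: number // TTL in ms; 0 or undefined = never refresh after initial load
  query_params?: Record<string, string>
}

interface CacheEntry {
  data?: unknown[]
  fetchedAt: number
  failedAttempts: number
}

// Global cache shared by all widgets that resolve to the same key
const dataCache = new Map<string, CacheEntry>()

function getCacheKey(options: RemoteWidgetOptions): string {
  return JSON.stringify({ route: options.route, params: options.query_params })
}

async function fetchOptions(options: RemoteWidgetOptions): Promise<unknown[]> {
  const key = getCacheKey(options)
  const entry = dataCache.get(key) ?? { fetchedAt: 0, failedAttempts: 0 }
  const ttl = options.refresh ?? 0

  // Serve from cache while the TTL (if any) has not expired
  const expired = ttl > 0 && Date.now() - entry.fetchedAt > ttl
  if (entry.data && !expired) return entry.data

  // Exponential backoff after failed attempts, capped at 10 seconds
  if (entry.failedAttempts > 0) {
    const backoff = Math.min(1000 * 2 ** (entry.failedAttempts - 1), 10000)
    await new Promise((resolve) => setTimeout(resolve, backoff))
  }

  try {
    const query = new URLSearchParams(options.query_params).toString()
    const res = await fetch(`${options.route}?${query}`)
    if (!res.ok) throw new Error(`HTTP ${res.status}`)
    const body = await res.json()
    const data = body[options.response_key] as unknown[]
    dataCache.set(key, { data, fetchedAt: Date.now(), failedAttempts: 0 })
    return data
  } catch (err) {
    dataCache.set(key, { ...entry, failedAttempts: entry.failedAttempts + 1 })
    throw err
  }
}

// Manual invalidation ("forceUpdate"): drop the entry so the next access refetches
function forceUpdate(options: RemoteWidgetOptions): void {
  dataCache.delete(getCacheKey(options))
}
```

The key property is that the cache key is derived only from the route and query parameters, so multiple widgets pointing at the same folder share a single fetch and a single invalidation point.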
216 | 217 | ### Phase 2: Node Output Specification Changes 218 | 219 | #### 2.1 New Output Format 220 | 221 | Nodes will transition from multiple return definitions to a single `RETURNS` tuple: 222 | 223 | ```python 224 | # Current format will be supported during transition 225 | RETURN_TYPES = ("CONDITIONING", "CONDITIONING") 226 | RETURN_NAMES = ("positive", "negative") 227 | OUTPUT_IS_LIST = (False, False) 228 | OUTPUT_TOOLTIPS = ("positive-tooltip", "negative-tooltip") 229 | 230 | # New format 231 | RETURNS = ( 232 | { 233 | "type": "CONDITIONING", 234 | "name": "positive", 235 | "is_list": False, 236 | "tooltip": "positive-tooltip", 237 | "optional": False # New field for optional outputs 238 | }, 239 | { 240 | "type": "CONDITIONING", 241 | "name": "negative", 242 | "is_list": False, 243 | "tooltip": "negative-tooltip" 244 | } 245 | ) 246 | ``` 247 | 248 | #### 2.2 New Response Format 249 | 250 | Old format: 251 | 252 | ```javascript 253 | { 254 | "CheckpointLoader": { 255 | "input": { 256 | "required": { 257 | "ckpt_name": [[ 258 | "file1", 259 | "file2", 260 | ... 261 | "fileN", 262 | ]], 263 | "combo_input": [[ 264 | "option1", 265 | "option2", 266 | ... 267 | "optionN", 268 | ], { 269 | "default": "option1", 270 | "tooltip": "Processing mode" 271 | }], 272 | }, 273 | "optional": {} 274 | }, 275 | "output": ["MODEL"], 276 | "output_name": ["model"], 277 | "output_is_list": [false], 278 | "output_tooltip": ["The loaded model"], 279 | "output_node": false, 280 | "category": "loaders" 281 | } 282 | } 283 | ``` 284 | 285 | New format: 286 | 287 | ```javascript 288 | { 289 | "CheckpointLoader": { 290 | "input": { 291 | "required": { 292 | "ckpt_name": [ 293 | "COMBO", 294 | { 295 | "type" : "remote", 296 | "route": "/internal/files", 297 | "response_key" : "files", 298 | "query_params" : { 299 | "folder_path" : "checkpoints" 300 | } 301 | } 302 | ], 303 | "combo_input": [ 304 | "COMBO", 305 | { 306 | "options": ["option1", "option2", ... "optionN"], 307 | "default": "option1", 308 | "tooltip": "Processing mode" 309 | } 310 | ], 311 | }, 312 | "optional": {} 313 | }, 314 | "output": [ 315 | { 316 | "type": "MODEL", 317 | "name": "model", 318 | "is_list": false, 319 | "tooltip": "The loaded model" 320 | } 321 | ], 322 | "output_node": false, 323 | "category": "loaders" 324 | } 325 | } 326 | ``` 327 | 328 | #### 2.3 Compatibility Layer 329 | 330 | Transformations will be applied on the frontend to convert the old format to the new format. 331 | 332 | #### 2.4 Gradual Change with Nodes 333 | 334 | Nodes will be updated incrementally to use the new output specification format. 335 | 336 | ### Migration Support 337 | 338 | To support gradual migration, the API will: 339 | 340 | 1. **Dual Support**: Accept both old and new node definitions 341 | 2. **Compatibility Layer**: Include a compatibility layer in the frontend that can type check and handle both old and new formats. 342 | 343 | ## Drawbacks 344 | 345 | 1. **Migration Effort**: Users and node developers will need to update their code to match the new formats. 346 | 2. **Additional Complexity**: Lazy loading adds network requests, which could complicate error handling and state management. 347 | 348 | ## Adoption strategy 349 | 350 | 1. **Version Support**: Maintain backward compatibility for at least one major version. 351 | 2. **Migration Guide**: Provide detailed documentation and migration scripts. 352 | 3. **Gradual Rollout**: Implement changes in phases, starting with lazy loading. 
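To make the frontend compatibility layer from section 2.3 concrete, the legacy parallel output arrays could be folded into the new object form roughly as sketched below. The type names and helper are assumptions for illustration; the real transformation would also need to cover inputs and the remaining definition fields:

```typescript
// Legacy node definition fields, as returned by /object_info today
interface NodeDefV1 {
  output: string[]
  output_name?: string[]
  output_is_list?: boolean[]
  output_tooltip?: string[]
}

// New-style output entry, mirroring the proposed RETURNS format
interface OutputSpecV2 {
  type: string
  name: string
  is_list: boolean
  tooltip?: string
}

// Map the parallel legacy arrays onto named output objects
function convertOutputsToV2(def: NodeDefV1): OutputSpecV2[] {
  return def.output.map((type, i) => ({
    type,
    name: def.output_name?.[i] ?? type.toLowerCase(),
    is_list: def.output_is_list?.[i] ?? false,
    tooltip: def.output_tooltip?.[i],
  }))
}

// Example: the old CheckpointLoader response from section 2.2
const legacy: NodeDefV1 = {
  output: ['MODEL'],
  output_name: ['model'],
  output_is_list: [false],
  output_tooltip: ['The loaded model'],
}
console.log(convertOutputsToV2(legacy))
// [{ type: 'MODEL', name: 'model', is_list: false, tooltip: 'The loaded model' }]
```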
353 | 354 | ## Unresolved questions 355 | 356 | ### Resolved 357 | 358 | 1. ~~Network failure handling~~ - Implemented with exponential backoff 359 | 2. ~~Caching strategy~~ - Per-widget initialization with manual invalidation 360 | 361 | ### Implementation Details 362 | 363 | 3. Should we provide a migration utility for updating existing nodes? 364 | 4. How do we handle custom node types that may not fit the new output specification format? 365 | 366 | ### Future Considerations 367 | 368 | 5. Should an option to set an invalidation signal be added to the remote COMBO type? 369 | 6. Should an option for a custom cache key for the remote COMBO type be added? 370 | 371 | ### Security Concerns 372 | 373 | 7. Implementation details needed for: 374 | - Rate limiting strategy 375 | - Input validation approach 376 | - Cache poisoning prevention measures 377 | - Access control mechanisms 378 | -------------------------------------------------------------------------------- /rfcs/0002-litegraph_native_reroute.md: -------------------------------------------------------------------------------- 1 | # RFC: LiteGraph Native Reroute 2 | 3 | - Start Date: 2025-01-12 4 | - Target Major Version: Frontend 1.15 5 | - Implemented PR: https://github.com/Comfy-Org/ComfyUI_frontend/pull/3151 6 | - Reference Issues: 7 | 8 | - 9 | - 10 | - 11 | 12 | ## Summary 13 | 14 | This RFC proposes replacing ComfyUI's current frontend-only reroute node implementation with a native LiteGraph reroute feature. The new implementation will treat reroutes as link metadata rather than full nodes, providing several benefits: 15 | 16 | - Simpler workflow JSON representation 17 | - Proper type safety throughout connection chains 18 | - Elimination of special-case handling for reroute nodes 19 | - Reduced complexity in workflow structures and graph traversal 20 | 21 | This change requires updates to both the workflow schema and LiteGraph.js library, with a migration path provided for existing workflows. 
22 | 23 | ## Basic example 24 | 25 | ### New litegraph native reroute node 26 | 27 | ![new-reroute](https://github.com/user-attachments/assets/dddef61a-f975-4d69-b143-64505b6b9eaa) 28 | 29 | ![new-reroute-2](https://github.com/user-attachments/assets/c4c90291-38e6-429f-a22d-401848bb82d7) 30 | 31 | Representation in workflow json (0.4 Schema): 32 | 33 | ```json 34 | { 35 | "links": [ 36 | [ 37 | 13, 38 | 4, 39 | 1, 40 | 6, 41 | 0, 42 | "CLIP" 43 | ] 44 | ], 45 | "extra": { 46 | "linkExtensions": [ 47 | { 48 | "id": 13, 49 | "parentId": 3 50 | } 51 | ], 52 | "reroutes": [ 53 | { 54 | "id": 2, 55 | "pos": [ 56 | 239.8215789794922, 57 | 354.64306640625 58 | ], 59 | "linkIds": [ 60 | 13 61 | ] 62 | }, 63 | { 64 | "id": 3, 65 | "parentId": 2, 66 | "pos": [ 67 | 309.733154296875, 68 | 208.2829132080078 69 | ], 70 | "linkIds": [ 71 | 13 72 | ] 73 | } 74 | ] 75 | } 76 | } 77 | ``` 78 | 79 | Representation in workflow json (1.0 Schema): 80 | 81 | ```json 82 | { 83 | "links": [ 84 | { 85 | "id": 13, 86 | "origin_id": 4, 87 | "origin_slot": 1, 88 | "target_id": 6, 89 | "target_slot": 0, 90 | "type": "CLIP", 91 | "parentId": 3 92 | } 93 | ], 94 | "reroutes": [ 95 | { 96 | "id": 2, 97 | "pos": [ 98 | 239.8215789794922, 99 | 354.64306640625 100 | ], 101 | "linkIds": [ 102 | 13 103 | ] 104 | }, 105 | { 106 | "id": 3, 107 | "parentId": 2, 108 | "pos": [ 109 | 309.733154296875, 110 | 208.2829132080078 111 | ], 112 | "linkIds": [ 113 | 13 114 | ] 115 | } 116 | ] 117 | } 118 | ``` 119 | 120 | ### Old frontend-only reroute node 121 | 122 | ![old-reroute](https://github.com/user-attachments/assets/03279c0b-cb3d-4668-afa4-3d8304814d67) 123 | 124 | Representation in workflow json (0.4 & 1.0 Schema): 125 | 126 | ```json 127 | { 128 | "links": [ 129 | [ 130 | 11, 131 | 4, 132 | 1, 133 | 10, 134 | 0, 135 | "*" 136 | ], 137 | [ 138 | 12, 139 | 10, 140 | 0, 141 | 6, 142 | 0, 143 | "CLIP" 144 | ] 145 | ], 146 | "nodes": [ 147 | { 148 | "id": 10, 149 | "type": "Reroute", 150 | "pos": [ 151 | 245.87435913085938, 152 | 185.70533752441406 153 | ], 154 | "size": [ 155 | 75, 156 | 26 157 | ], 158 | "flags": {}, 159 | "order": 1, 160 | "mode": 0, 161 | "inputs": [ 162 | { 163 | "name": "", 164 | "type": "*", 165 | "link": 11 166 | } 167 | ], 168 | "outputs": [ 169 | { 170 | "name": "", 171 | "type": "CLIP", 172 | "links": [ 173 | 12 174 | ], 175 | "slot_index": 0 176 | } 177 | ], 178 | "properties": { 179 | "showOutputText": false, 180 | "horizontal": false 181 | } 182 | } 183 | ] 184 | } 185 | ``` 186 | 187 | ## Motivation 188 | 189 | The current frontend-only reroute implementation has several limitations and drawbacks that this proposal aims to address: 190 | 191 | 1. **Graph Complexity**: The legacy reroute implementation adds unnecessary complexity to the workflow graph by creating additional nodes and links. Each reroute point requires a full node object with inputs/outputs and properties, which bloats the workflow JSON and makes graph traversal more complex. 192 | 193 | 2. **Type Safety Issues**: The current implementation uses wildcard type matching (`"*"`) for inputs, which bypasses LiteGraph's type checking system. This can lead to type inconsistency issues when connecting nodes, as the reroute node may connect incompatible types without proper validation. 194 | 195 | 3. **Implementation Overhead**: The legacy reroute implementation has resulted in numerous special-case handling throughout the codebase. 
Many features require specific patches to handle reroute nodes differently (checking `if (node.name === 'Reroute')`) which increases maintenance burden and makes the codebase more fragile. See for a list of patches. 196 | 197 | By implementing reroutes as a native LiteGraph feature, we can: 198 | 199 | - Simplify workflow representations by treating reroutes as link metadata rather than nodes 200 | - Maintain proper type checking through the entire connection chain 201 | - Eliminate the need for special-case handling of reroute nodes in various features 202 | - Reduce the overall complexity of workflow JSON structures 203 | 204 | ## Detailed design 205 | 206 | ### Schema Changes 207 | 208 | The native reroute implementation introduces a cleaner schema structure that moves reroute information out of the node list and into dedicated sections. The key changes between Schema 0.4 and 1.0 are: 209 | 210 | 1. **Link Structure** 211 | 212 | - 0.4: Links are arrays `[id, origin_id, origin_slot, target_id, target_slot, type]` 213 | - 1.0: Links are objects with named properties: 214 | 215 | ``` 216 | { 217 | "id": number, 218 | "origin_id": number, 219 | "origin_slot": number, 220 | "target_id": number, 221 | "target_slot": number, 222 | "type": string, 223 | "parentId": number // References parent reroute point 224 | } 225 | ``` 226 | 227 | 2. **Reroute Structure** 228 | 229 | - 0.4: Reroutes are nested under `extra.reroutes` 230 | - 1.0: Reroutes are top-level array under `reroutes` 231 | 232 | ### Implementation Details 233 | 234 | 1. **Link Extension** 235 | 236 | - Each reroute point creates a virtual extension of the original link 237 | - Links maintain their original type throughout the reroute chain 238 | - Parent-child relationships between reroute points are tracked via `parentId` 239 | 240 | 2. **Position Management** 241 | 242 | - Reroute points store their canvas position as `[x, y]` coordinates 243 | - Multiple reroute points can be chained using the `parentId` reference 244 | - Each reroute point references its associated link(s) via `linkIds` 245 | 246 | 3. **Type Safety** 247 | 248 | - The link type is preserved from source to destination 249 | - No type conversion or wildcard matching occurs at reroute points 250 | - LiteGraph's native type checking remains active throughout the connection 251 | 252 | ### API Changes 253 | 254 | The LiteGraph.js library will be extended with new methods: 255 | 256 | ```javascript 257 | /** 258 | * Creates a new reroute and adds it to the graph. 259 | * @param pos Position in graph space 260 | * @param before The existing link segment (reroute, link) that will be after this reroute, 261 | * going from the node output to input. 262 | * @returns The newly created reroute - typically ignored. 263 | * Already implemented. 264 | */ 265 | LGraph.prototype.createReroute(pos: Point, before: LinkSegment): Reroute { 266 | ... 267 | } 268 | 269 | /** 270 | * Removes a reroute from the graph 271 | * @param id ID of reroute to remove 272 | * Already implemented. 273 | */ 274 | LGraph.prototype.removeReroute(id: RerouteId): void { 275 | ... 276 | } 277 | 278 | // New API endpoint (Refactor needed). 279 | LGraphCanvas.prototype.renderReroutePoints = function() { 280 | // Handles visual rendering of reroute points 281 | } 282 | ``` 283 | 284 | ### Migration Path 285 | 286 | To ensure a smooth transition, the migration strategy addresses both schema version changes and reroute implementation changes: 287 | 288 | #### Schema Migration (0.4 to 1.0) 289 | 290 | 1. 
**Automatic Detection**: The system will automatically detect the schema version based on the link format (array vs object) 291 | 292 | 2. **Link Structure Conversion**: 293 | 294 | ```javascript 295 | // Old format (0.4) 296 | [13, 4, 1, 6, 0, "CLIP"] 297 | // New format (1.0) 298 | { "id": 13, "origin_id": 4, "origin_slot": 1, "target_id": 6, "target_slot": 0, "type": "CLIP" } 299 | ``` 300 | 301 | 1. **Reroute Location**: Reroutes will be moved from `extra.reroutes` to the top-level `reroutes` array 302 | 303 | #### Legacy Reroute Migration 304 | 305 | 1. **Detection**: During workflow loading, the system will identify legacy reroute nodes by checking: 306 | 307 | - Node type === "Reroute" 308 | - Single input/output configuration 309 | - Presence in the `nodes` array 310 | 311 | 2. **Conversion Process**: 312 | 313 | ```javascript 314 | // For each legacy reroute node: 315 | { 316 | // Create new native reroute point 317 | const reroute = { 318 | id: legacyNode.id, 319 | pos: legacyNode.pos, 320 | linkIds: [outputLink.id] 321 | }; 322 | 323 | // Connect original input to final output 324 | const newLink = { 325 | id: generateNewId(), 326 | origin_id: inputLink.origin_id, 327 | origin_slot: inputLink.origin_slot, 328 | target_id: outputLink.target_id, 329 | target_slot: outputLink.target_slot, 330 | type: outputLink.type 331 | }; 332 | 333 | // Remove old node and links. 334 | // Not fully connected legacy reroutes will be removed. 335 | removeNode(legacyNode); 336 | removeLinks([inputLink.id, outputLink.id]); 337 | } 338 | ``` 339 | 340 | 1. **Validation**: After conversion, the system will verify: 341 | 342 | - Type consistency is maintained 343 | - All connections are preserved 344 | - Graph topology remains functional 345 | 346 | #### Backwards Compatibility 347 | 348 | - The system will maintain support for loading legacy reroute nodes throughout the targeted major version release cycle 349 | - Warning messages will be displayed when legacy reroutes are detected 350 | - Documentation will be updated to encourage migration to native reroutes 351 | 352 | ## Drawbacks 353 | 354 | Several important considerations should be weighed before implementing this proposal: 355 | 356 | 1. **Implementation Complexity** 357 | - Requires significant modifications to the core LiteGraph.js library 358 | - Need to implement new rendering logic for reroute points 359 | - Complex migration logic needed to handle legacy reroute nodes 360 | - Additional testing burden to ensure compatibility across different workflow versions 361 | 362 | 2. **Breaking Changes** 363 | - Schema changes from 0.4 to 1.0 require all tools in the ecosystem to be updated 364 | - Third-party applications that directly manipulate workflow JSON will need modifications 365 | - Custom node implementations that interact with reroute nodes may break 366 | 367 | 3. **Performance Considerations** 368 | - Additional overhead in link rendering due to reroute point calculations 369 | - Increased memory usage from storing reroute metadata for each affected link 370 | - Potential impact on workflow loading times during migration 371 | 372 | 4. **User Experience Impact** 373 | - Users familiar with the current node-based reroute system will need to adapt 374 | - Documentation and tutorials will need significant updates 375 | - Temporary confusion during the transition period as both systems may coexist 376 | 377 | 5. 
**Alternative Approaches** 378 | - The current node-based implementation, while not ideal, is functional 379 | - Improvements to the existing system might be less disruptive than a complete redesign 380 | - User-space solutions might be sufficient for some use cases 381 | 382 | 6. **Maintenance Burden** 383 | - New code paths need long-term maintenance 384 | - Migration logic will need to be maintained for backward compatibility 385 | - Potential for new edge cases and bugs in the rerouting system 386 | 387 | ## Unresolved questions 388 | 389 | 1. **Unconnected Reroutes**: The current implementation in the litegraph library only allows reroutes to be created on existing links, which differs from the legacy reroute behavior that allowed unconnected reroutes. We need to determine: 390 | - Whether supporting unconnected reroutes is a necessary feature 391 | - If implemented, how would unconnected reroutes be represented in the schema 392 | - The potential impact on graph validation and type checking 393 | - Use cases where unconnected reroutes provide meaningful functionality 394 | -------------------------------------------------------------------------------- /rfcs/0005-subgraph.md: -------------------------------------------------------------------------------- 1 | # RFC: Subgraph 2 | 3 | - Start Date: 2025-01-12 4 | - Target Major Version: Frontend 1.22 5 | - Reference Issues: 6 | - https://github.com/Comfy-Org/ComfyUI_frontend/issues/1077 7 | - https://github.com/comfyanonymous/ComfyUI/issues/5353 8 | - https://github.com/comfyanonymous/ComfyUI/issues/4094 9 | - [... many more] 10 | - https://github.com/comfyanonymous/ComfyUI/issues/150 11 | 12 | ## Summary 13 | 14 | Subgraphs separate sections of workflows (graphs) into distinct, logical entities. This separation provides re-usability and modularity, and can drastically reduce visual and cognitive complexity. 15 | 16 | - A subgraph has its own inputs and outputs, which can be used to connect nodes on the parent graph to those in the subgraph. 17 | - Subgraphs added to a graph multiple times only need to be edited once to update all instances 18 | 19 | ## Basic example 20 | 21 | Simple design mock-ups. 22 | 23 | ### Main workflow 24 | 25 | The parent / primary workflow, as opened normally. 26 | 27 | ![Parent workflow example](https://github.com/user-attachments/assets/255e5a43-fd7a-48b4-beb5-c9e8e09af4bd) 28 | 29 | ### Subgraph 30 | 31 | The contents of the subgraph, as displayed after opening. 32 | 33 | ![Subgraph example](https://github.com/user-attachments/assets/b765ef67-3ac3-4ba0-90ba-0465471b970f) 34 | 35 | ## Motivation 36 | 37 | The ComfyUI community has made several requests for this feature. In any workflow with more than a handful of nodes, subgraphs can significantly improve UX. Requests for subgraph-like features go back to at least March 2023. 38 | 39 | 1. **User experience**: This feature drastically improves UX when reading or editing even moderately-sized workflows. 40 | 2. **Reduced Complexity**: Subgraphs allow a reduction in visual and cognitive complexity by combining an entire section of a workflow into a single visual representation. This also supports mental partitioning of the sum effect of the subgraph. 41 | 3. **Re-usability & modularity**: A subgraph can be be added multiple times to a workflow, and to other workflows. By default, the subgraph will be linked rather than duplicated; editing a subgraph effectively updates all instances. 42 | 4. **Performance**: By visually combining groups of graph objects (e.g. 
nodes) into a single object, subgraphs remove soft-limits on workflow size due to performance limitations. Subgraphs linked multiple times can also reduce workflow export size, improving storage and parse/load times. 43 | 44 | ## Detailed design 45 | 46 | A subgraph is a standard ComfyUI workflow with the addition of inputs and outputs. By extending the existing workflow management UI, some of the specifications will require only minor changes. 47 | 48 | ### Design requirements 49 | 50 | - The exact same tools are used to edit a workflow & a subgraph 51 | - Can open/edit subgraph instance or template in its own tab 52 | - Can also open/edit subgraph in-place on a workflow, by double-click or similar 53 | - Subgraphs embedded in an open workflow can be copied into the users’ workflow library 54 | - Subgraphs always have two special, non-removable nodes: `Inputs` and `Outputs` (`ionodes`) 55 | - Ensure design does not prevent adding an option to hide these in the future 56 | - Simple method to drop noodles onto `ionodes`, creating new in/out slots in the parent graph 57 | - Use existing UX when connecting links to a node with empty slots 58 | - Allow type reset of empty `ionode` slots via simple interaction 59 | - Individual node widgets can be exposed directly as widgets in the parent graph 60 | - Highlighted / visually flagged in some fashion when viewed in the subgraph 61 | - A panel that lists all inputs, outputs, and exposed widgets 62 | - Add / edit / remove 63 | - Widgets: Edit / remove, with add as a future milestone 64 | 65 | ### Connecting between graph and subgraph 66 | 67 | ```c 68 | // Current: A node to node connection 69 | Node output --> Node input 70 | 71 | // Proposed: A node to subgraph node connection 72 | // Workflow view 73 | Node output --> Subgraph input 74 | // Subgraph view 75 | Subgraph input --> Subgraph node input 76 | ``` 77 | 78 | ### Server API 79 | 80 | Subgraphs are to be implemented as a frontend-only feature. When assembling a prompt to queue, the frontend will recursively flatten all subgraphs, effectively removing the subgraph from the serialised prompt. 81 | 82 | A tuple of the node ID and subgraph instance ID will be mapped to a new unique identifier, to be used in API comms with the server. 83 | 84 | ### Subgraph library 85 | 86 | Precise UI implementation details are not the intention of this section, and can be adapted and iterated with user feedback. 87 | 88 | - Subgraphs will be stored alongside workflows, with at least an icon to indicate that they are already used as subgraphs. 89 | - Filters and other options may be added. 90 | - Any ComfyUI workflow can be used as a subgraph; the subgraph input and output nodes will be added on first use. 91 | 92 | ### Incompatible data types (e.g. COMBO) 93 | 94 | When connecting links to `ionodes`, existing connections on the other side may block the new connection from being made. Example: 95 | 96 | - Inside a subgraph, a single input is connected to a node FLOAT input with a min/max range of 0.0 - 1.0 97 | - Out in the main workflow, a new link is connected from a FLOAT output that has a valid range of 50.0 - 100.0 98 | - The connection is blocked as the linked types mismatch 99 | - A special UI indication should be implemented 100 | - When implementing, this should be designed so that an option could later be added to allow a keyboard modifier or other method to override this behaviour, and simply disconnect the incompatible links. 
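As a rough illustration of the prompt flattening described in the Server API section above, the recursive expansion and ID remapping could take the shape sketched below. All names and data shapes here (`promptId`, `FlattenableGraph`, the prefix scheme) are assumptions for illustration rather than the planned implementation, and rewiring links across the subgraph `ionodes` is omitted for brevity:

```typescript
interface PromptNode {
  id: string
  type: string
  inputs: Record<string, unknown>
}

interface SubgraphInstance {
  id: string          // id of the instance as placed on its parent graph
  subgraphId: string  // UUID of the subgraph definition
}

interface FlattenableGraph {
  nodes: PromptNode[]
  subgraphs?: SubgraphInstance[]
}

// All definitions are stored once, at the workflow level
type SubgraphDefinitions = Record<string, FlattenableGraph>

// The (subgraph instance id, node id) tuple becomes one unique prompt id
const promptId = (instanceId: string, nodeId: string) => `${instanceId}:${nodeId}`

function flattenForPrompt(
  graph: FlattenableGraph,
  definitions: SubgraphDefinitions,
  prefix = ''
): PromptNode[] {
  // Re-id the nodes of this (sub)graph under the current instance prefix
  const flat: PromptNode[] = graph.nodes.map((node) => ({
    ...node,
    id: prefix ? promptId(prefix, node.id) : node.id,
  }))

  // Recursively inline every subgraph instance placed on this graph
  for (const instance of graph.subgraphs ?? []) {
    const definition = definitions[instance.subgraphId]
    if (!definition) continue
    const childPrefix = prefix ? promptId(prefix, instance.id) : instance.id
    flat.push(...flattenForPrompt(definition, definitions, childPrefix))
  }

  return flat
}
```

The important property is that each node's prompt ID is derived from the (instance ID, node ID) tuple, so the same subgraph definition can be inlined any number of times without ID collisions in the serialised prompt.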
101 | 102 | ### Legacy compatibility 103 | 104 | This is a significant change, but should provide compatibility with existing systems where practical. 105 | 106 | 1. Allow export in compatibility mode - use old format, with subgraphs placed directly in the workflow inside a group. 107 | 2. Old APIs 108 | - Present a virtual graph by recursively flattening the actual graph 109 | - Allow extensions to interact normally with litegraph via virtual graph `Proxy`(?), or getter/setters for e.g. 110 | - `LGraph.nodes` 111 | - `LGraph.groups` 112 | - etc 113 | 3. New APIs 114 | - Present subgraph-aware nested graphs 115 | 116 | ### Schema updates 117 | 118 | A workflow will include a single copy of every subgraph it contains. Multiple subgraphs can then be added to a workflow, each only adding a few fields of metadata. 119 | 120 | 1. **Workflow ID** 121 | 1. All workflows will have a UUID assigned during creation, or on load if undefined 122 | 2. An instance of a subgraph is referenced by ID within the workflow itself. This ensures the workflow remains a self-contained unit, and cannot be broken by forgetting to copy supporting files. 123 | 3. The unique ID allows comparison of subgraphs to other revisions (e.g. in other workflows or the local workflow library). 124 | 4. There is no automatic update other workflows, but the design allows a user-controlled mechanism to update workflows with older revisions, without the current requirement to replay all changes by hand. No special handling of destructive changes (e.g. remove subgraph input) is currently planned. 125 | 2. **Workflow revision** 126 | 1. Allow copy/paste of any revision as-is, e.g.: 127 | 1. Open a workflow with an old revision of a subgraph in the workflow library. 128 | 2. Copy the subgraph from inside the workflow 129 | 3. Paste the subgraph - old revision is pasted, no update to workflow definitions 130 | 2. Add from toolbox / search always uses library revision 131 | 3. Do not allow multiple revisions in a single workflow (unnecessary complexity - solution for this is to clone the subgraph, giving it a new ID with no connection to the old ID) 132 | 3. **Nesting** 133 | 1. Recursive, one-to-many / top-down nesting of graphs 134 | 4. **Subgraph inputs & outputs** (`ionodes`) 135 | - Which widgets are exposed as input/output 136 | 137 | ### Schema requirements 138 | 139 | Proposed extensions to the existing workflow schema (all workflows) 140 | 141 | ```ts 142 | // Proposed extensions to workflow schema 143 | interface WorkflowExtensions extends SerialisableGraph { 144 | /** Generated once, never mutated. Default: Generated UUID v4 */ 145 | readonly id: UUIDv4 146 | /** Automatically incremented on save, if changed. Default: 0 */ 147 | revision: int 148 | 149 | subgraphs?: SubgraphInstance[] 150 | 151 | /** Defines objects referenced elsewhere in the schema */ 152 | definitions?: { 153 | /** Definitions of all subgraphs used in this workflow. */ 154 | subgraphs?: Record[] 155 | } 156 | } 157 | ``` 158 | 159 | ### An instance of a subgraph, when placed on a (sub)graph 160 | 161 | ```ts 162 | interface SubgraphInstance { 163 | id: NodeId 164 | subgraphId: UUIDv4 165 | bounding: Rect 166 | name?: string 167 | color?: string 168 | background?: string 169 | // ... any other display properties, similar to node properties 170 | } 171 | ``` 172 | 173 | ### Subgraph definition 174 | 175 | ```ts 176 | /** 177 | * Defines a subgraph and its contents. 178 | * Can be referenced multiple times in a schema. 
179 | */ 180 | interface SubgraphDefinition extends WorkflowExtensions { 181 | /** An input of the subgraph itself. Similar to a reroute, it exists in both the graph and subgraph. */ 182 | inputs: SubgraphIO[] 183 | /** An output of the subgraph itself. Similar to a reroute, it exists in both the graph and subgraph. */ 184 | outputs: SubgraphIO[] 185 | /** 186 | * A list of node widgets displayed in the parent graph, on the subgraph object. 187 | */ 188 | widgets: ExposedWidget[] 189 | } 190 | 191 | /** Subgraph I/O slots */ 192 | interface SubgraphIO { 193 | id: string 194 | type: string 195 | // ... other display properties, similar to node inputs and outputs 196 | } 197 | 198 | /** A reference to a node widget shown in the parent graph */ 199 | interface ExposedWidget { 200 | id: NodeId 201 | name: string 202 | } 203 | ``` 204 | 205 | NB: Can be easily adapted to match current pending RFC 02 and/or RFC 04 specifications. RFC 04 in particular would drastically simplify some implementation details and provide unified UX. Caveat is that it would prevent 206 | 207 | ### Examples 208 | 209 | Workflow example 210 | 211 | ```jsonc 212 | { 213 | "id": "10000000-0000-0000-0000-000000000000", 214 | "revision": 86, 215 | // Object definitions - define once, use many instances 216 | "definitions": { 217 | "subgraphs": { 218 | // Subgraph workflow (definition) 219 | "20000000-0000-0000-0000-000000000000": { 220 | "revision": 3, 221 | "inputs": [{ "id": 0, "type": "IMAGE" }], 222 | "outputs": [ 223 | { "id": 0, "type": "IMAGE" }, 224 | { "id": 1, "type": "IMAGE" } 225 | ], 226 | "last_node_id": 1, 227 | "last_link_id": 0, 228 | "nodes": [ 229 | { 230 | "id": 1, 231 | "type": "Invert Image", 232 | "pos": [410, 380], 233 | "size": [210, 46], 234 | "inputs": [{ "name": "image", "type": "IMAGE", "link": 3 }], 235 | "outputs": [ 236 | { "name": "inverted", "type": "IMAGE", "links": [4] }, 237 | { "name": "sharpened", "type": "IMAGE", "links": [5] } 238 | ] 239 | // ... truncated example node 240 | } 241 | ], 242 | "links": [ 243 | [3, "inputs", 0, 1, 0, "IMAGE"], 244 | [4, 1, 0, "outputs", 0, "IMAGE"], 245 | [5, 1, 1, "outputs", 1, "IMAGE"] 246 | ], 247 | "groups": [], 248 | "config": {}, 249 | "extra": { "ds": { "scale": 1, "offset": [0, 0] } }, 250 | "version": 2 251 | } 252 | } 253 | }, 254 | "subgraphs": [ 255 | // An instance of a subgraph 256 | { 257 | "id": "0", 258 | "subgraphId": "20000000-0000-0000-0000-000000000000", 259 | "bounding": [100, 100, 160, 120] 260 | } 261 | // [...more subgraph instances] 262 | ], 263 | // The rest of the main workflow 264 | "last_node_id": 1, 265 | "last_link_id": 0, 266 | "nodes": [ 267 | { 268 | "id": 1, 269 | "type": "VAEDecode", 270 | "pos": [410, 380], 271 | "size": [210, 46] 272 | // ... truncated example node 273 | } 274 | ], 275 | "links": [], 276 | "groups": [], 277 | "config": {}, 278 | "extra": { "ds": { "scale": 1, "offset": [0, 0] } }, 279 | "version": 2 280 | } 281 | ``` 282 | 283 | ### Definitions 284 | 285 | Any mention of UUID refers to v4 UUIDs. 286 | 287 | ### Future considerations 288 | 289 | Implementation should not block desirable future features: 290 | 291 | - **Execute subgraph**: Execute a subgraph (e.g. a subgraph with no _required_ inputs). 292 | - **Re-execute**: Execute only the nodes within the subgraph, to edit / fine-tune any changes inside, using forced-cached inputs, and halting execution at the subgraph boundary. 293 | 294 | ## Drawbacks 295 | 296 | 1. **Breaking change** 297 | 1. 
297 | 1. Workflows using linked subgraphs will not function correctly in older versions of ComfyUI. 298 | 2. It is possible to export in the old format, or to create a tool that converts workflows back to the original format. 299 | 2. **Update complexity** 300 | 1. The revision of a subgraph in a user’s library may not match the subgraph revision in a workflow. 301 | 2. Users can update to the latest revision; however, this may break connections or cause other issues, similar to “Recreate node” (see Unresolved questions). 302 | 3. There is no accounting for branching of revisions. 303 | 3. **Extension compatibility** 304 | 1. Using a `Proxy` to provide a compatibility layer may impact some extensions. 305 | 2. Non-proxy alternatives require greater effort. 306 | 4. **Code complexity** 307 | 1. This is a complex change and cannot simply be delivered in a single PR. 308 | 2. Significant time is required to implement and test. 309 | 310 | ## Alternatives 311 | 312 | 1. **Group node**: Group nodes were the original solution, providing a subset of this functionality. Attempting to re-engineer group nodes to provide the complete set of features is impractical. 313 | 2. https://github.com/vivax3794/ComfyUI-Sub-Nodes: A third-party extension with decent feature coverage. 314 | 1. Limitations on the workflow format. 315 | 2. Implements many features via custom nodes - the right approach for an extension, but not easily converted into new features in the core libraries. 316 | 3. **Subgraph widget**: Representing a subgraph inside a node as a widget. This provides a small UX improvement over the status quo, but with a disproportionately high effort requirement. 317 | 4. **Do nothing**: Continued user frustration and soft limits on workflows; inefficient, messy, and impractical. 318 | 319 | ## Adoption strategy 320 | 321 | - [ ] TODO: Requires finalisation of unresolved questions. 322 | 323 | If we implement this proposal, how will existing ComfyUI users and developers adopt it? Quickly: 324 | 325 | 1. **Convert group nodes to subgraphs**: This is planned, but outside the scope of this RFC. 326 | 2. **Extensions**: Consider both old and new 327 | 1. Updates 328 | 2. New APIs 329 | 3. **Schema concerns**: These will mirror the workflow v2.0 schema RFC; any discussion that even partially involves that RFC should happen there. 330 | 331 | ## Unresolved questions 332 | 333 | 1. Is there a reason not to add id & revision to base workflows? 334 | 2. Is there any practical, common need for revision schema migrations? 335 | 1. If so, is it a need that must be in core? 336 | 3. How badly will my plan to flatten subgraphs→prompt break extensions? (A rough flattening sketch follows this list.) 337 | 4. When connecting from the input `ionode` to multiple widgets, which widget is represented on the subgraph in the main workflow? 338 | 1. An input is a 1-to-1 match with a data type 339 | 2. There can be multiple widgets that represent a data type (e.g. slider, number) 340 | 3. Simply restrict to exact matches?
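For unresolved question 3 (and the virtual-graph idea under Legacy compatibility), the sketch below illustrates one possible way subgraph instances could be flattened into a single node list before building a prompt. It is a minimal, non-normative sketch that assumes the schema above with simplified node shapes; it omits link rewiring across the subgraph boundary, and the names used (`flattenWorkflow`, `FlatNode`, `GraphLike`) are hypothetical.

```ts
// Illustrative only - not part of the proposal.
// Simplified shapes; real workflows carry many more fields.
interface FlatNode {
  id: string
  type: string
}

interface GraphLike {
  nodes?: { id: number | string; type: string }[]
  subgraphs?: { id: number | string; subgraphId: string }[]
  definitions?: { subgraphs?: Record<string, GraphLike> }
}

/**
 * Recursively flattens a workflow and its subgraph instances into a single
 * node list. Node IDs are prefixed with the path of instance IDs so they
 * remain unique after flattening. Assumes all definitions live at the top
 * level of the workflow, as in the example above.
 */
function flattenWorkflow(
  graph: GraphLike,
  definitions: Record<string, GraphLike> = graph.definitions?.subgraphs ?? {},
  prefix = ""
): FlatNode[] {
  const flat: FlatNode[] = (graph.nodes ?? []).map((node) => ({
    id: `${prefix}${node.id}`,
    type: node.type,
  }))

  for (const instance of graph.subgraphs ?? []) {
    const definition = definitions[instance.subgraphId]
    if (!definition) throw new Error(`Missing subgraph definition: ${instance.subgraphId}`)

    // Each instance contributes its own copy of the definition's contents.
    flat.push(...flattenWorkflow(definition, definitions, `${prefix}${instance.id}:`))
  }

  return flat
}
```

Applied to the example workflow above, this yields the `VAEDecode` node as `"1"` and one `Invert Image` node per subgraph instance (e.g. `"0:1"`).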
341 | -------------------------------------------------------------------------------- /specifications/node_def.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/ComfyNodeDefV2", 3 | "definitions": { 4 | "ComfyNodeDefV2": { 5 | "type": "object", 6 | "properties": { 7 | "inputs": { 8 | "type": "object", 9 | "additionalProperties": { 10 | "anyOf": [ 11 | { 12 | "type": "object", 13 | "properties": { 14 | "default": { 15 | "anyOf": [ 16 | { 17 | "type": "number" 18 | }, 19 | { 20 | "type": "array", 21 | "items": { 22 | "type": "number" 23 | } 24 | } 25 | ] 26 | }, 27 | "defaultInput": { 28 | "type": "boolean" 29 | }, 30 | "forceInput": { 31 | "type": "boolean" 32 | }, 33 | "tooltip": { 34 | "type": "string" 35 | }, 36 | "hidden": { 37 | "type": "boolean" 38 | }, 39 | "advanced": { 40 | "type": "boolean" 41 | }, 42 | "rawLink": { 43 | "type": "boolean" 44 | }, 45 | "lazy": { 46 | "type": "boolean" 47 | }, 48 | "min": { 49 | "type": "number" 50 | }, 51 | "max": { 52 | "type": "number" 53 | }, 54 | "step": { 55 | "type": "number" 56 | }, 57 | "display": { 58 | "type": "string", 59 | "enum": [ 60 | "slider", 61 | "number", 62 | "knob" 63 | ] 64 | }, 65 | "control_after_generate": { 66 | "type": "boolean" 67 | }, 68 | "type": { 69 | "type": "string", 70 | "const": "INT" 71 | }, 72 | "name": { 73 | "type": "string" 74 | }, 75 | "isOptional": { 76 | "type": "boolean" 77 | } 78 | }, 79 | "required": [ 80 | "type", 81 | "name" 82 | ], 83 | "additionalProperties": true 84 | }, 85 | { 86 | "type": "object", 87 | "properties": { 88 | "default": { 89 | "anyOf": [ 90 | { 91 | "type": "number" 92 | }, 93 | { 94 | "type": "array", 95 | "items": { 96 | "type": "number" 97 | } 98 | } 99 | ] 100 | }, 101 | "defaultInput": { 102 | "type": "boolean" 103 | }, 104 | "forceInput": { 105 | "type": "boolean" 106 | }, 107 | "tooltip": { 108 | "type": "string" 109 | }, 110 | "hidden": { 111 | "type": "boolean" 112 | }, 113 | "advanced": { 114 | "type": "boolean" 115 | }, 116 | "rawLink": { 117 | "type": "boolean" 118 | }, 119 | "lazy": { 120 | "type": "boolean" 121 | }, 122 | "min": { 123 | "type": "number" 124 | }, 125 | "max": { 126 | "type": "number" 127 | }, 128 | "step": { 129 | "type": "number" 130 | }, 131 | "display": { 132 | "type": "string", 133 | "enum": [ 134 | "slider", 135 | "number", 136 | "knob" 137 | ] 138 | }, 139 | "round": { 140 | "anyOf": [ 141 | { 142 | "type": "number" 143 | }, 144 | { 145 | "type": "boolean", 146 | "const": false 147 | } 148 | ] 149 | }, 150 | "type": { 151 | "type": "string", 152 | "const": "FLOAT" 153 | }, 154 | "name": { 155 | "type": "string" 156 | }, 157 | "isOptional": { 158 | "type": "boolean" 159 | } 160 | }, 161 | "required": [ 162 | "type", 163 | "name" 164 | ], 165 | "additionalProperties": true 166 | }, 167 | { 168 | "type": "object", 169 | "properties": { 170 | "default": { 171 | "type": "boolean" 172 | }, 173 | "defaultInput": { 174 | "type": "boolean" 175 | }, 176 | "forceInput": { 177 | "type": "boolean" 178 | }, 179 | "tooltip": { 180 | "type": "string" 181 | }, 182 | "hidden": { 183 | "type": "boolean" 184 | }, 185 | "advanced": { 186 | "type": "boolean" 187 | }, 188 | "rawLink": { 189 | "type": "boolean" 190 | }, 191 | "lazy": { 192 | "type": "boolean" 193 | }, 194 | "label_on": { 195 | "type": "string" 196 | }, 197 | "label_off": { 198 | "type": "string" 199 | }, 200 | "type": { 201 | "type": "string", 202 | "const": "BOOLEAN" 203 | }, 204 | "name": { 205 | "type": "string" 206 | }, 207 | 
"isOptional": { 208 | "type": "boolean" 209 | } 210 | }, 211 | "required": [ 212 | "type", 213 | "name" 214 | ], 215 | "additionalProperties": true 216 | }, 217 | { 218 | "type": "object", 219 | "properties": { 220 | "default": { 221 | "type": "string" 222 | }, 223 | "defaultInput": { 224 | "type": "boolean" 225 | }, 226 | "forceInput": { 227 | "type": "boolean" 228 | }, 229 | "tooltip": { 230 | "type": "string" 231 | }, 232 | "hidden": { 233 | "type": "boolean" 234 | }, 235 | "advanced": { 236 | "type": "boolean" 237 | }, 238 | "rawLink": { 239 | "type": "boolean" 240 | }, 241 | "lazy": { 242 | "type": "boolean" 243 | }, 244 | "multiline": { 245 | "type": "boolean" 246 | }, 247 | "dynamicPrompts": { 248 | "type": "boolean" 249 | }, 250 | "defaultVal": { 251 | "type": "string" 252 | }, 253 | "placeholder": { 254 | "type": "string" 255 | }, 256 | "type": { 257 | "type": "string", 258 | "const": "STRING" 259 | }, 260 | "name": { 261 | "type": "string" 262 | }, 263 | "isOptional": { 264 | "type": "boolean" 265 | } 266 | }, 267 | "required": [ 268 | "type", 269 | "name" 270 | ], 271 | "additionalProperties": true 272 | }, 273 | { 274 | "type": "object", 275 | "properties": { 276 | "default": {}, 277 | "defaultInput": { 278 | "type": "boolean" 279 | }, 280 | "forceInput": { 281 | "type": "boolean" 282 | }, 283 | "tooltip": { 284 | "type": "string" 285 | }, 286 | "hidden": { 287 | "type": "boolean" 288 | }, 289 | "advanced": { 290 | "type": "boolean" 291 | }, 292 | "rawLink": { 293 | "type": "boolean" 294 | }, 295 | "lazy": { 296 | "type": "boolean" 297 | }, 298 | "control_after_generate": { 299 | "type": "boolean" 300 | }, 301 | "image_upload": { 302 | "type": "boolean" 303 | }, 304 | "image_folder": { 305 | "type": "string", 306 | "enum": [ 307 | "input", 308 | "output", 309 | "temp" 310 | ] 311 | }, 312 | "allow_batch": { 313 | "type": "boolean" 314 | }, 315 | "video_upload": { 316 | "type": "boolean" 317 | }, 318 | "remote": { 319 | "type": "object", 320 | "properties": { 321 | "route": { 322 | "anyOf": [ 323 | { 324 | "type": "string", 325 | "format": "uri" 326 | }, 327 | { 328 | "type": "string", 329 | "pattern": "^\\/" 330 | } 331 | ] 332 | }, 333 | "refresh": { 334 | "anyOf": [ 335 | { 336 | "type": "number", 337 | "minimum": -9007199254740991, 338 | "maximum": 9007199254740991 339 | }, 340 | { 341 | "type": "number", 342 | "maximum": 9007199254740991, 343 | "minimum": -9007199254740991 344 | } 345 | ] 346 | }, 347 | "response_key": { 348 | "type": "string" 349 | }, 350 | "query_params": { 351 | "type": "object", 352 | "additionalProperties": { 353 | "type": "string" 354 | } 355 | }, 356 | "refresh_button": { 357 | "type": "boolean" 358 | }, 359 | "control_after_refresh": { 360 | "type": "string", 361 | "enum": [ 362 | "first", 363 | "last" 364 | ] 365 | }, 366 | "timeout": { 367 | "type": "number", 368 | "minimum": 0 369 | }, 370 | "max_retries": { 371 | "type": "number", 372 | "minimum": 0 373 | } 374 | }, 375 | "required": [ 376 | "route" 377 | ], 378 | "additionalProperties": false 379 | }, 380 | "options": { 381 | "type": "array", 382 | "items": { 383 | "type": [ 384 | "string", 385 | "number" 386 | ] 387 | } 388 | }, 389 | "type": { 390 | "type": "string", 391 | "const": "COMBO" 392 | }, 393 | "name": { 394 | "type": "string" 395 | }, 396 | "isOptional": { 397 | "type": "boolean" 398 | } 399 | }, 400 | "required": [ 401 | "type", 402 | "name" 403 | ], 404 | "additionalProperties": true 405 | }, 406 | { 407 | "type": "object", 408 | "properties": { 409 | "default": {}, 410 | 
"defaultInput": { 411 | "type": "boolean" 412 | }, 413 | "forceInput": { 414 | "type": "boolean" 415 | }, 416 | "tooltip": { 417 | "type": "string" 418 | }, 419 | "hidden": { 420 | "type": "boolean" 421 | }, 422 | "advanced": { 423 | "type": "boolean" 424 | }, 425 | "rawLink": { 426 | "type": "boolean" 427 | }, 428 | "lazy": { 429 | "type": "boolean" 430 | }, 431 | "type": { 432 | "type": "string" 433 | }, 434 | "name": { 435 | "type": "string" 436 | }, 437 | "isOptional": { 438 | "type": "boolean" 439 | } 440 | }, 441 | "required": [ 442 | "type", 443 | "name" 444 | ], 445 | "additionalProperties": true 446 | } 447 | ] 448 | } 449 | }, 450 | "outputs": { 451 | "type": "array", 452 | "items": { 453 | "type": "object", 454 | "properties": { 455 | "index": { 456 | "type": "number" 457 | }, 458 | "name": { 459 | "type": "string" 460 | }, 461 | "type": { 462 | "type": "string" 463 | }, 464 | "is_list": { 465 | "type": "boolean" 466 | }, 467 | "options": { 468 | "type": "array" 469 | }, 470 | "tooltip": { 471 | "type": "string" 472 | } 473 | }, 474 | "required": [ 475 | "index", 476 | "name", 477 | "type", 478 | "is_list" 479 | ], 480 | "additionalProperties": false 481 | } 482 | }, 483 | "hidden": { 484 | "type": "object", 485 | "additionalProperties": {} 486 | }, 487 | "name": { 488 | "type": "string" 489 | }, 490 | "display_name": { 491 | "type": "string" 492 | }, 493 | "description": { 494 | "type": "string" 495 | }, 496 | "category": { 497 | "type": "string" 498 | }, 499 | "output_node": { 500 | "type": "boolean" 501 | }, 502 | "python_module": { 503 | "type": "string" 504 | }, 505 | "deprecated": { 506 | "type": "boolean" 507 | }, 508 | "experimental": { 509 | "type": "boolean" 510 | } 511 | }, 512 | "required": [ 513 | "inputs", 514 | "outputs", 515 | "name", 516 | "display_name", 517 | "description", 518 | "category", 519 | "output_node", 520 | "python_module" 521 | ], 522 | "additionalProperties": false 523 | } 524 | }, 525 | "$schema": "http://json-schema.org/draft-07/schema#" 526 | } 527 | --------------------------------------------------------------------------------