├── requirements.txt
├── exports.py
├── README.md
├── .gitignore
├── LICENSE
├── web
│   └── js
│       └── cw.js
└── __init__.py
/requirements.txt:
--------------------------------------------------------------------------------
1 | tqdm
2 | aiohttp-retry
3 | GitPython
--------------------------------------------------------------------------------
/exports.py:
--------------------------------------------------------------------------------
1 | SCHEMA_VERSION = "1.0"
2 |
3 | def generate_export_json_file(workflow_json, snapshot_json, files_data, pip_reqs, os_type, python_version):
4 | return {
5 | "format" : "comfyui_launcher",
6 | "version" : SCHEMA_VERSION,
7 | "workflow_json" : workflow_json,
8 | "snapshot_json" : snapshot_json,
9 | "files" : files_data,
10 | "pip_requirements" : pip_reqs,
11 | "os" : os_type,
12 | "python_version" : python_version
13 | }
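14 |
15 | # Illustrative shape of the export document (a sketch with hypothetical values;
16 | # the keys mirror the dict above, populated by the caller in __init__.py):
17 | # {
18 | #     "format": "comfyui_launcher",
19 | #     "version": "1.0",
20 | #     "workflow_json": {...},
21 | #     "snapshot_json": {"comfyui": "<commit hash>", "git_custom_nodes": {...}, "file_custom_nodes": [...]},
22 | #     "files": [[{"download_url": "...", "dest_relative_path": "...", "sha256_checksum": "...", "size": 12345}]],
23 | #     "pip_requirements": [...],
24 | #     "os": {"name": "posix", "platform": "Linux"},
25 | #     "python_version": {"version": "...", "version_info": {"major": 3, "minor": 10, "micro": 0, "releaselevel": "final", "serial": 0}}
26 | # }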
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ComfyUI-ComfyWorkflows
2 |
3 | The best way to run, share, & discover thousands of ComfyUI workflows.
4 |
5 | [https://comfyworkflows.com](https://comfyworkflows.com)
6 |
7 | [Join our Discord](https://discord.gg/kXS43yTRNA)
8 |
9 | ## Installation
10 |
11 | ```bash
12 | cd ComfyUI/custom_nodes/
13 | git clone https://github.com/thecooltechguy/ComfyUI-ComfyWorkflows
14 | cd ComfyUI-ComfyWorkflows/
15 | python -m pip install -r requirements.txt
16 | ```
17 |
18 | Restart ComfyUI.
19 |
20 | ## Usage
21 |
22 | ### Deploy your workflow online
23 | This will enable anyone to run your workflow online, without having to install ComfyUI.
24 |
25 | 1. Create a free account on [https://comfyworkflows.com](https://comfyworkflows.com)
26 | 2. Share your workflow at: [https://comfyworkflows.com/upload](https://comfyworkflows.com/upload)
27 | 3. On the workflow's page, click **Enable cloud workflow** and copy the code displayed.
28 | 4. Open your workflow in your local ComfyUI.
29 | 5. Click the **Upload to ComfyWorkflows** button in the menu.
30 | 6. Enter your code and click **Upload**.
31 | 7. After a few minutes, your workflow will be runnable online by anyone via its URL on ComfyWorkflows.
32 |
33 | ## Upcoming features
34 |
35 | - [ ] Import any workflow from ComfyWorkflows with zero setup.
36 | - [ ] Backup your local private workflows to the cloud.
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/#use-with-ide
110 | .pdm.toml
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 |
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 |
135 | # Rope project settings
136 | .ropeproject
137 |
138 | # mkdocs documentation
139 | /site
140 |
141 | # mypy
142 | .mypy_cache/
143 | .dmypy.json
144 | dmypy.json
145 |
146 | # Pyre type checker
147 | .pyre/
148 |
149 | # pytype static type analyzer
150 | .pytype/
151 |
152 | # Cython debug symbols
153 | cython_debug/
154 |
155 | # PyCharm
156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158 | # and can be added to the global gitignore or merged into this file. For a more nuclear
159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160 | #.idea/
161 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/web/js/cw.js:
--------------------------------------------------------------------------------
1 | import { app } from "../../../scripts/app.js";
2 | import { api } from '../../../scripts/api.js'
3 | import {defaultGraph} from "../../../scripts/defaultGraph.js";
4 | // import { ComfyWidgets } from "../../../scripts/widgets.js"
5 | import { ComfyDialog, $el } from "../../../scripts/ui.js";
6 | // import { ShareDialog, SUPPORTED_OUTPUT_NODE_TYPES, getPotentialOutputsAndOutputNodes } from "./comfyui-share.js";
7 |
8 | var docStyle = document.createElement('style');
9 |
10 | // flex-wrap: wrap;
11 | docStyle.innerHTML = `
12 | .cw3-menu-container {
13 | column-gap: 20px;
14 | display: flex;
15 | flex-direction: column;
16 | justify-content: center;
17 | }
18 |
19 | .cw3-menu-column {
20 | display: flex;
21 | flex-direction: column;
22 | }
23 |
24 | .cw3-title {
25 |     padding: 10px 10px 0 10px;
26 | background-color: black;
27 | text-align: center;
28 | height: 45px;
29 | }
30 | .cw3-export-title {
31 |     padding: 10px 10px 0 10px;
32 | background-color: black;
33 | text-align: center;
34 | height: 75px;
35 | }
36 | `;
37 |
38 | document.head.appendChild(docStyle);
39 |
40 | var badge_mode = "none";
41 |
42 | // copied style from https://github.com/pythongosssss/ComfyUI-Custom-Scripts
43 | const style = `
44 | #comfyworkflows-button {
45 | position: relative;
46 | overflow: hidden;
47 | }
48 | .pysssss-workflow-arrow-2 {
49 | position: absolute;
50 | top: 0;
51 | bottom: 0;
52 | right: 0;
53 | font-size: 12px;
54 | display: flex;
55 | align-items: center;
56 | width: 24px;
57 | justify-content: center;
58 | background: rgba(255,255,255,0.1);
59 | content: "▼";
60 | }
61 | .pysssss-workflow-arrow-2:after {
62 | content: "▼";
63 | }
64 | .pysssss-workflow-arrow-2:hover {
65 | filter: brightness(1.6);
66 | background-color: var(--comfy-menu-bg);
67 | }
68 | .pysssss-workflow-popup-2 ~ .litecontextmenu {
69 | transform: scale(1.3);
70 | }
71 | #comfyworkflows-button-menu {
72 | z-index: 10000000000 !important;
73 | }
74 | `;
75 |
76 |
77 | export var cw_instance = null;
78 | export var cw_import_instance = null;
79 | export var cw_export_instance = null;
80 |
81 | export function setCWInstance(obj) {
82 | cw_instance = obj;
83 | }
84 |
85 | export function setCWImportInstance(obj) {
86 | cw_import_instance = obj;
87 | }
88 |
89 | export function setCWExportInstance(obj) {
90 | cw_export_instance = obj;
91 | }
92 |
93 | async function fetchNicknames() {
94 | const response1 = await api.fetchApi(`/customnode/getmappings?mode=local`);
95 | const mappings = await response1.json();
96 |
97 | let result = {};
98 |
99 | for (let i in mappings) {
100 | let item = mappings[i];
101 | var nickname;
102 | if (item[1].title) {
103 | nickname = item[1].title;
104 | }
105 | else {
106 | nickname = item[1].title_aux;
107 | }
108 |
109 | for (let j in item[0]) {
110 | result[item[0][j]] = nickname;
111 | }
112 | }
113 |
114 | return result;
115 | }
116 |
117 | let nicknames = await fetchNicknames();
118 |
119 |
120 | function newDOMTokenList(initialTokens) {
121 | const tmp = document.createElement(`div`);
122 |
123 | const classList = tmp.classList;
124 | if (initialTokens) {
125 | initialTokens.forEach(token => {
126 | classList.add(token);
127 | });
128 | }
129 |
130 | return classList;
131 | }
132 |
133 | const NODE_TYPE_X_NODE_DATA = {};
134 |
135 |
136 | // -----------
137 | class CWMenuDialog extends ComfyDialog {
138 | static cw_sharekey = "";
139 |
140 | constructor() {
141 | super();
142 |
143 | this.code_input = $el("input", {
144 | type: "text",
145 | placeholder: "Enter your workflow's code here",
146 | required: true
147 | }, []);
148 |
149 | this.final_message = $el("div", {
150 | style: {
151 | color: "white",
152 | textAlign: "center",
153 | // marginTop: "10px",
154 | // backgroundColor: "black",
155 | padding: "10px",
156 | }
157 | }, []);
158 |
159 | this.deploy_button = $el("button", {
160 | type: "submit",
161 | textContent: "Upload workflow",
162 | style: {
163 | backgroundColor: "blue"
164 | }
165 | }, []);
166 |
167 | const close_button = $el("button", {
168 | type: "button", textContent: "Close", onclick: () => {
169 | // Reset state
170 | this.deploy_button.textContent = "Upload workflow";
171 | this.deploy_button.style.display = "inline-block";
172 | this.final_message.innerHTML = "";
173 | this.final_message.style.color = "white";
174 | this.code_input.value = "";
175 | this.close()
176 | }
177 | });
178 |
179 | const content =
180 | $el("div.cw3-menu-container", //"div.comfy-modal-content",
181 | [
182 | $el("tr.cw3-title", {
183 | width: "100%", style: {
184 | padding: "10px 10px 10px 10px",
185 | }
186 | }, [
187 | $el("font", { size: 6, color: "white" }, [`Upload your workflow to ComfyWorkflows.com`]),
188 | $el("br", {}, []),
189 | $el("font", { size: 3, color: "white" }, [`This lets people easily run your workflow online & on their computer.`]),
190 | ]),
191 | $el("br", {}, []),
192 |
193 | // add "share key" input (required), "title" input (required), "description" input (optional)
194 | // $el("div.cw3-menu-container", {width:"100%"}, [
195 | $el("div.cw3-menu-container", [
196 | $el("p", { size: 3, color: "white", style: { color: "white" } }, ["Follow these steps to upload your workflow:"]),
197 | $el("ol", { style: { color: "white" } }, [
198 | $el("li", {}, ["Share your workflow online at ComfyWorkflows.com."]),
199 | $el("li", {}, ["Go to your workflow's URL"]),
200 | $el("li", {}, ["Click the 'Enable online workflow' or 'Update online workflow' button on the workflow's page."]),
201 | $el("li", {}, ["Copy the code shown and paste it below."]),
202 | ]),
203 | $el("br", {}, []),
204 | $el("h4", {
205 | textContent: "Your workflow's code",
206 | size: 3,
207 | color: "white",
208 | style: {
209 | color: 'white'
210 | }
211 | }, []),
212 | this.code_input,
213 | $el("br", {}, []),
214 |
215 | this.final_message,
216 | $el("br", {}, []),
217 |
218 | ]),
219 | this.deploy_button,
220 | close_button,
221 | ],
222 | );
223 |
224 | this.deploy_button.onclick = async () => {
225 | if (!this.code_input.value) {
226 | alert("Please enter your workflow's code.");
227 | return;
228 | }
229 |
230 | const prompt = await app.graphToPrompt();
231 |
232 | const workflowNodes = prompt.workflow.nodes;
233 | const filteredNodeTypeToNodeData = {};
234 | for (const workflowNode of workflowNodes) {
235 | const workflowNodeData = NODE_TYPE_X_NODE_DATA[workflowNode.type];
236 | if (workflowNodeData) {
237 | filteredNodeTypeToNodeData[workflowNode.type] = workflowNodeData;
238 | }
239 | }
240 |
241 | // Change the upload button's text to "Uploading..." to indicate that the upload has started
242 | this.deploy_button.textContent = "Uploading...";
243 | this.final_message.style.color = "white"; //"green";
244 | const initialFinalMessage = "This may take a few minutes. Please do not close this window. See the console for upload progress.";
245 | this.final_message.innerHTML = initialFinalMessage;
246 |
247 | // set an interval to call /cw/upload_progress every second to get the upload progress and update the final message text
248 | // cancel the interval once the /cw/upload endpoint returns a response
249 |
250 | const deployProgressInterval = setInterval(async () => {
251 | const deployProgressResp = await api.fetchApi(`/cw/upload_progress`, {
252 | method: 'GET',
253 | headers: { 'Content-Type': 'application/json' },
254 | });
255 |
256 | if (deployProgressResp.status == 200) {
257 | try {
258 | const deployProgressResp_json = await deployProgressResp.json();
259 | const statusText = deployProgressResp_json.status;
260 | if (statusText) {
261 | this.final_message.innerHTML = initialFinalMessage + "<br/>" + statusText;
262 | }
263 | } catch (e) {
264 | // console.log(e);
265 | }
266 | }
267 | }, 1_000);
268 |
269 | const response = await api.fetchApi(`/cw/upload`, {
270 | method: 'POST',
271 | headers: { 'Content-Type': 'application/json' },
272 | body: JSON.stringify({
273 | code: this.code_input.value,
274 | prompt,
275 | filteredNodeTypeToNodeData
276 | })
277 | });
278 |
279 | clearInterval(deployProgressInterval);
280 |
281 | if (response.status != 200) {
282 | try {
283 | const response_json = await response.json();
284 | if (response_json.error) {
285 | alert(response_json.error);
286 | this.deploy_button.textContent = "Upload workflow";
287 | this.deploy_button.style.display = "inline-block";
288 | this.final_message.innerHTML = "";
289 | this.final_message.style.color = "white";
290 | this.code_input.value = "";
291 | this.close();
292 | return;
293 | } else {
294 | alert("Failed to upload your workflow. Please try again.");
295 | this.deploy_button.textContent = "Upload workflow";
296 | this.deploy_button.style.display = "inline-block";
297 | this.final_message.innerHTML = "";
298 | this.final_message.style.color = "white";
299 | this.code_input.value = "";
300 | this.close();
301 | return;
302 | }
303 | } catch (e) {
304 | alert("Failed to upload your workflow. Please try again.");
305 | this.deploy_button.textContent = "Upload workflow";
306 | this.deploy_button.style.display = "inline-block";
307 | this.final_message.innerHTML = "";
308 | this.final_message.style.color = "white";
309 | this.code_input.value = "";
310 | this.close();
311 | return;
312 | }
313 | }
314 |
315 | const response_json = await response.json();
316 |
317 | if (response_json.deploy_url) {
318 | this.final_message.innerHTML = "Your workflow has been uploaded! Now, anyone can run your workflow online at: " + response_json.deploy_url + "";
319 | }
320 |
321 | this.final_message.style.color = "white"; //"green";
322 |
323 | // hide the upload button
324 | this.deploy_button.textContent = "Uploaded!";
325 | this.deploy_button.style.display = "none";
326 | }
327 |
328 |
329 | content.style.width = '100%';
330 | content.style.height = '100%';
331 |
332 | this.element = $el("div.comfy-modal", { parent: document.body }, [content]);
333 | this.element.style.width = '1000px';
334 | // this.element.style.height = '400px';
335 | this.element.style.zIndex = 10000;
336 | }
337 |
338 | show() {
339 | this.element.style.display = "block";
340 | }
341 | }
342 |
343 |
344 |
345 | class CWExportMenuDialog extends ComfyDialog {
346 | constructor() {
347 | super();
348 |
349 | this.final_message = $el("div", {
350 | style: {
351 | color: "white",
352 | textAlign: "center",
353 | // marginTop: "10px",
354 | // backgroundColor: "black",
355 | padding: "10px",
356 | }
357 | }, []);
358 |
359 | this.deploy_button = $el("button", {
360 | type: "submit",
361 | textContent: "Export workflow",
362 | style: {
363 | backgroundColor: "blue"
364 | }
365 | }, []);
366 |
367 | const close_button = $el("button", {
368 | type: "button", textContent: "Close", onclick: () => {
369 | // Reset state
370 | this.deploy_button.textContent = "Export workflow";
371 | this.deploy_button.style.display = "inline-block";
372 | this.final_message.innerHTML = "";
373 | this.final_message.style.color = "white";
374 | this.close()
375 | }
376 | });
377 |
378 | const content =
379 | $el("div.cw3-menu-container", //"div.comfy-modal-content",
380 | [
381 | $el("tr.cw3-export-title", {
382 | width: "100%", style: {
383 | padding: "10px 10px 10px 10px",
384 | }
385 | }, [
386 | $el("font", { size: 6, color: "white" }, [`Export your workflow`]),
387 | $el("br", {}, []),
388 | $el("font", { size: 3, color: "white" }, [`This will let anyone import & run this workflow with ZERO setup, using ComfyUI-Launcher.`]),
389 | $el("br", {}, []),
390 | // https://github.com/thecooltechguy/ComfyUI-Launcher
391 | $el("font", { size: 2, color: "white" }, ["https://github.com/thecooltechguy/ComfyUI-Launcher"]),
392 | ]),
393 | $el("br", {}, []),
394 | this.final_message,
395 | $el("br", {}, []),
396 | this.deploy_button,
397 | close_button,
398 | ],
399 | );
400 |
401 | this.deploy_button.onclick = async () => {
402 | const prompt = await app.graphToPrompt();
403 |
404 | const workflowNodes = prompt.workflow.nodes;
405 | const filteredNodeTypeToNodeData = {};
406 | for (const workflowNode of workflowNodes) {
407 | const workflowNodeData = NODE_TYPE_X_NODE_DATA[workflowNode.type];
408 | if (workflowNodeData) {
409 | filteredNodeTypeToNodeData[workflowNode.type] = workflowNodeData;
410 | }
411 | }
412 |
413 | // Change the export button's text to "Exporting..." to indicate that the export has started
414 | this.deploy_button.textContent = "Exporting...";
415 | this.final_message.style.color = "white"; //"green";
416 | const initialFinalMessage = "This may take a few minutes. Please do not close this window. See the console for the export progress.";
417 | this.final_message.innerHTML = initialFinalMessage;
418 |
419 | // set an interval to call /cw/export_progress every 1 second to get the export progress and set the text of the final message
420 | // cancel the interval once the /cw/export endpoint returns a response
421 |
422 | const deployProgressInterval = setInterval(async () => {
423 | const deployProgressResp = await api.fetchApi(`/cw/export_progress`, {
424 | method: 'GET',
425 | headers: { 'Content-Type': 'application/json' },
426 | });
427 |
428 | if (deployProgressResp.status == 200) {
429 | try {
430 | const deployProgressResp_json = await deployProgressResp.json();
431 | const statusText = deployProgressResp_json.status;
432 | if (statusText) {
433 | this.final_message.innerHTML = initialFinalMessage + "<br/>" + statusText;
434 | }
435 | } catch (e) {
436 | // console.log(e);
437 | }
438 | }
439 | }, 1_000);
440 |
441 | const response = await api.fetchApi(`/cw/export`, {
442 | method: 'POST',
443 | headers: { 'Content-Type': 'application/json' },
444 | body: JSON.stringify({
445 | prompt,
446 | filteredNodeTypeToNodeData
447 | })
448 | });
449 |
450 | clearInterval(deployProgressInterval);
451 |
452 | if (response.status != 200) {
453 | try {
454 | const response_json = await response.json();
455 | if (response_json.error) {
456 | alert(response_json.error);
457 | this.deploy_button.textContent = "Export workflow";
458 | this.deploy_button.style.display = "inline-block";
459 | this.final_message.innerHTML = "";
460 | this.final_message.style.color = "white";
461 | this.close();
462 | return;
463 | } else {
464 | alert("Failed to export your workflow. Please try again.");
465 | this.deploy_button.textContent = "Export workflow";
466 | this.deploy_button.style.display = "inline-block";
467 | this.final_message.innerHTML = "";
468 | this.final_message.style.color = "white";
469 | this.close();
470 | return;
471 | }
472 | } catch (e) {
473 | alert("Failed to export your workflow. Please try again.");
474 | this.deploy_button.textContent = "Export workflow";
475 | this.deploy_button.style.display = "inline-block";
476 | this.final_message.innerHTML = "";
477 | this.final_message.style.color = "white";
478 | this.close();
479 | return;
480 | }
481 | }
482 |
483 | const response_json = await response.json();
484 |
485 | // trigger a download of a json file containing the response_json as content
486 | const blob = new Blob([JSON.stringify(response_json)], { type: 'application/json' });
487 | const url = URL.createObjectURL(blob);
488 | const a = document.createElement('a');
489 | a.href = url;
490 | a.download = 'comfyui-launcher.json';
491 | document.body.appendChild(a);
492 | a.click();
493 | a.remove();
494 | URL.revokeObjectURL(url);
495 |
496 | this.final_message.innerHTML = "Your workflow has been exported & downloaded to your computer (as a comfyui-launcher.json file). Now, anyone can run your workflow with ZERO setup using ComfyUI-Launcher.";
497 | this.final_message.style.color = "white"; //"green";
498 |
499 | // hide the export button
500 | this.deploy_button.textContent = "Exported!";
501 | this.deploy_button.style.display = "none";
502 | }
503 |
504 |
505 | content.style.width = '100%';
506 | content.style.height = '100%';
507 |
508 | this.element = $el("div.comfy-modal", { parent: document.body }, [content]);
509 | this.element.style.width = '1000px';
510 | // this.element.style.height = '400px';
511 | this.element.style.zIndex = 10000;
512 | }
513 |
514 | show() {
515 | this.element.style.display = "block";
516 | }
517 | }
518 |
519 |
520 |
521 |
522 | app.registerExtension({
523 | name: "ComfyUI.ComfyWorkflows",
524 | init() {
525 | $el("style", {
526 | textContent: style,
527 | parent: document.head,
528 | });
529 | },
530 | async setup() {
531 | // console.log(JSON.stringify(NODE_TYPE_X_NODE_DATA));
532 | const menu = document.querySelector(".comfy-menu");
533 | const separator = document.createElement("hr");
534 |
535 | separator.style.margin = "20px 0";
536 | separator.style.width = "100%";
537 | menu.append(separator);
538 |
539 | const deployButton = document.createElement("button");
540 | deployButton.textContent = "Upload to ComfyWorkflows";
541 | deployButton.onclick = () => {
542 | if (!cw_instance)
543 | setCWInstance(new CWMenuDialog());
544 | cw_instance.show();
545 | }
546 | menu.append(deployButton);
547 |
548 | const exportButton = document.createElement("button");
549 | exportButton.textContent = "Export workflow (Launcher)";
550 | exportButton.onclick = () => {
551 | if (!cw_export_instance)
552 | setCWExportInstance(new CWExportMenuDialog());
553 | cw_export_instance.show();
554 | }
555 | menu.append(exportButton);
556 |
557 | // if this is the first time the user is opening this project, load the default graph for this project
558 | // this is necessary because the user may have previously run a different comfyui on the same port as this project, so the local storage would have that workflow's graph
559 | const res = await api.fetchApi(`/cw/current_graph`, {
560 | method: 'GET',
561 | headers: { 'Content-Type': 'application/json' },
562 | });
563 | if (res.status === 200) {
564 | const res_json = await res.json();
565 | if (res_json) {
566 | await app.loadGraphData(res_json);
567 | } else {
568 | await app.loadGraphData(defaultGraph);
569 | }
570 |
571 | // note how we only start the interval to save the graph to the server after the graph has been loaded initially
572 | setInterval(async () => {
573 | const graph = await app.graphToPrompt();
574 | const res = await api.fetchApi(`/cw/save_graph`, {
575 | method: 'POST',
576 | headers: { 'Content-Type': 'application/json' },
577 | body: JSON.stringify(graph['workflow']),
578 | });
579 | console.log("Saved graph to server: " + res.status);
580 | }, 1_000);
581 | } else {
582 | await app.loadGraphData(defaultGraph);
583 | }
584 | },
585 |
586 | async beforeRegisterNodeDef(nodeType, nodeData, app) {
587 | NODE_TYPE_X_NODE_DATA[nodeData.name] = nodeData;
588 |
589 | const onDrawForeground = nodeType.prototype.onDrawForeground;
590 | nodeType.prototype.onDrawForeground = function (ctx) {
591 | const r = onDrawForeground?.apply?.(this, arguments);
592 |
593 | if (!this.flags.collapsed && badge_mode != 'none' && nodeType.title_mode != LiteGraph.NO_TITLE) {
594 | let text = "";
595 | if (badge_mode == 'id_nick')
596 | text = `#${this.id} `;
597 |
598 | if (nicknames[nodeData.name.trim()]) {
599 | let nick = nicknames[nodeData.name.trim()];
600 |
601 | if (nick.length > 25) {
602 | text += nick.substring(0, 23) + "..";
603 | }
604 | else {
605 | text += nick;
606 | }
607 | }
608 |
609 | if (text != "") {
610 | let fgColor = "white";
611 | let bgColor = "#0F1F0F";
612 | let visible = true;
613 |
614 | ctx.save();
615 | ctx.font = "12px sans-serif";
616 | const sz = ctx.measureText(text);
617 | ctx.fillStyle = bgColor;
618 | ctx.beginPath();
619 | ctx.roundRect(this.size[0] - sz.width - 12, -LiteGraph.NODE_TITLE_HEIGHT - 20, sz.width + 12, 20, 5);
620 | ctx.fill();
621 |
622 | ctx.fillStyle = fgColor;
623 | ctx.fillText(text, this.size[0] - sz.width - 6, -LiteGraph.NODE_TITLE_HEIGHT - 6);
624 | ctx.restore();
625 | }
626 | }
627 | return r;
628 | };
629 | },
630 |
631 | async loadedGraphNode(node, app) {
632 | if (node.has_errors) {
633 | const onDrawForeground = node.onDrawForeground;
634 | node.onDrawForeground = function (ctx) {
635 | const r = onDrawForeground?.apply?.(this, arguments);
636 |
637 | if (!this.flags.collapsed && badge_mode != 'none') {
638 | let text = "";
639 | if (badge_mode == 'id_nick')
640 | text = `#${this.id} `;
641 |
642 | if (nicknames[node.type.trim()]) {
643 | let nick = nicknames[node.type.trim()];
644 |
645 | if (nick.length > 25) {
646 | text += nick.substring(0, 23) + "..";
647 | }
648 | else {
649 | text += nick;
650 | }
651 | }
652 |
653 | if (text != "") {
654 | let fgColor = "white";
655 | let bgColor = "#0F1F0F";
656 | let visible = true;
657 |
658 | ctx.save();
659 | ctx.font = "12px sans-serif";
660 | const sz = ctx.measureText(text);
661 | ctx.fillStyle = bgColor;
662 | ctx.beginPath();
663 | ctx.roundRect(this.size[0] - sz.width - 12, -LiteGraph.NODE_TITLE_HEIGHT - 20, sz.width + 12, 20, 5);
664 | ctx.fill();
665 |
666 | ctx.fillStyle = fgColor;
667 | ctx.fillText(text, this.size[0] - sz.width - 6, -LiteGraph.NODE_TITLE_HEIGHT - 6);
668 | ctx.restore();
669 |
670 | ctx.save();
671 | ctx.font = "bold 14px sans-serif";
672 | const sz2 = ctx.measureText(node.type);
673 | ctx.fillStyle = 'white';
674 | ctx.fillText(node.type, this.size[0] / 2 - sz2.width / 2, this.size[1] / 2);
675 | ctx.restore();
676 | }
677 | }
678 |
679 | return r;
680 | };
681 | }
682 | }
683 | });
684 |
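685 | // A note on the data flow (inferred from this file): NODE_TYPE_X_NODE_DATA, filled in
686 | // beforeRegisterNodeDef above, maps every registered node type name to its definition.
687 | // The upload/export dialogs send only the definitions used by the current graph
688 | // (filteredNodeTypeToNodeData), which keeps the request payload small.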
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | import io
3 | import json
4 | import os
5 | import platform
6 | import sys
7 | import time
8 | import aiohttp
9 | import git
10 | import folder_paths
11 | import server
12 |
13 | import pkg_resources
14 | from typing import Callable
15 | from aiohttp import web
16 | from aiohttp_retry import ExponentialRetry, RetryClient
17 | from tqdm.asyncio import tqdm
18 | from .exports import generate_export_json_file
19 |
20 |
21 | NODE_CLASS_MAPPINGS = {}
22 | NODE_DISPLAY_NAME_MAPPINGS = {}
23 |
24 |
25 | WEB_DIRECTORY = "./web"
26 | __all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS", "WEB_DIRECTORY"]
27 |
28 | comfy_path = os.path.dirname(folder_paths.__file__)
29 | custom_nodes_path = os.path.join(comfy_path, 'custom_nodes')
30 |
31 | CW_ENDPOINT = os.environ.get("CW_ENDPOINT", "https://comfyworkflows.com")
32 |
33 |
34 | def get_current_snapshot():
35 | # Get ComfyUI hash (credit to ComfyUI-Manager for this function)
36 | repo_path = os.path.dirname(folder_paths.__file__)
37 |
38 | if not os.path.exists(os.path.join(repo_path, '.git')):
39 |         print("Cannot create a snapshot: the installed ComfyUI is not a Git repository.")
40 |         return None
41 |
42 | repo = git.Repo(repo_path)
43 | comfyui_commit_hash = repo.head.commit.hexsha
44 |
45 | git_custom_nodes = {}
46 | file_custom_nodes = []
47 |
48 | # Get custom nodes hash
49 | for path in os.listdir(custom_nodes_path):
50 | fullpath = os.path.join(custom_nodes_path, path)
51 |
52 | if os.path.isdir(fullpath):
53 | is_disabled = path.endswith(".disabled")
54 |
55 | try:
56 | git_dir = os.path.join(fullpath, '.git')
57 |
58 | if not os.path.exists(git_dir):
59 | continue
60 |
61 | repo = git.Repo(fullpath)
62 | commit_hash = repo.head.commit.hexsha
63 | url = repo.remotes.origin.url
64 | git_custom_nodes[url] = {
65 | 'hash': commit_hash,
66 | 'disabled': is_disabled
67 | }
68 |
69 |             except Exception:
70 | print(f"Failed to extract snapshots for the custom node '{path}'.")
71 |
72 | elif path.endswith('.py'):
73 | is_disabled = path.endswith(".py.disabled")
74 | filename = os.path.basename(path)
75 | item = {
76 | 'filename': filename,
77 | 'disabled': is_disabled
78 | }
79 |
80 | file_custom_nodes.append(item)
81 |
82 | return {
83 | 'comfyui': comfyui_commit_hash,
84 | 'git_custom_nodes': git_custom_nodes,
85 | 'file_custom_nodes': file_custom_nodes,
86 | }
87 |
88 | def get_file_sha256_checksum(file_path):
89 |     BUF_SIZE = 65536  # read the file in 64 KB chunks
90 | sha256 = hashlib.sha256()
91 | with open(file_path, 'rb') as f:
92 | while True:
93 | data = f.read(BUF_SIZE)
94 | if not data:
95 | break
96 | sha256.update(data)
97 | return sha256.hexdigest()
98 |
99 | def extract_file_names(json_data):
100 | """Extract unique file names from the input JSON data."""
101 | file_names = set()
102 |
103 | # Recursively search for file names in the JSON data
104 | def recursive_search(data):
105 | if isinstance(data, dict):
106 | for value in data.values():
107 | recursive_search(value)
108 | elif isinstance(data, list):
109 | for item in data:
110 | recursive_search(item)
111 | elif isinstance(data, str) and '.' in data:
112 | file_names.add(os.path.basename(data)) # file_names.add(data)
113 |
114 | recursive_search(json_data)
115 | return list(file_names)
116 |
117 | def find_file_paths(base_dir, file_names):
118 | """Find the paths of the files in the base directory."""
119 | file_paths = {}
120 |
121 | for root, dirs, files in os.walk(base_dir, followlinks=True):
122 | # Exclude certain directories
123 | dirs[:] = [d for d in dirs if d not in ['.git']]
124 |
125 | for file in files:
126 | if file in file_names:
127 | file_paths[file] = os.path.join(root, file)
128 | return file_paths
129 |
130 |
131 | class CallbackBytesIO(io.BytesIO):
132 |
133 | def __init__(self, callback: Callable, initial_bytes: bytes):
134 | self._callback = callback
135 | super().__init__(initial_bytes)
136 |
137 | def read(self, size=-1) -> bytes:
138 | data = super().read(size)
139 | self._callback(len(data))
140 | return data
141 |
142 | DEPLOY_PROGRESS = {}
143 | EXPORT_PROGRESS = {}
144 |
145 | @server.PromptServer.instance.routes.get("/cw/upload_progress")
146 | async def api_comfyworkflows_upload_progress(request):
147 | global DEPLOY_PROGRESS
148 | return web.json_response(DEPLOY_PROGRESS)
149 |
150 | @server.PromptServer.instance.routes.get("/cw/export_progress")
151 | async def api_comfyworkflows_export_progress(request):
152 | global EXPORT_PROGRESS
153 | return web.json_response(EXPORT_PROGRESS)
154 |
155 | UPLOAD_CHUNK_SIZE = 100_000_000 # 100 MB
156 |
157 | def get_num_chunks(file_size):
158 | global UPLOAD_CHUNK_SIZE
159 | num_chunks = file_size // UPLOAD_CHUNK_SIZE
160 | if file_size % UPLOAD_CHUNK_SIZE != 0:
161 | num_chunks += 1
162 | return num_chunks
163 |
164 | @server.PromptServer.instance.routes.get("/comfyui_interface")
165 | async def get_comfyui_interface(request):
166 | print(os.path.join(server.PromptServer.instance.web_root, "comfyui_index.html"))
167 | return web.FileResponse(os.path.join(server.PromptServer.instance.web_root, "comfyui_index.html"))
168 |
169 | @server.PromptServer.instance.routes.get("/cw/current_graph")
170 | async def api_comfyworkflows_current_graph(request):
171 | current_file_directory = os.path.dirname(os.path.abspath(__file__))
172 | current_graph_filepath = os.path.join(current_file_directory, "current_graph.json")
173 | if not os.path.exists(current_graph_filepath):
174 | return web.Response(status=404)
175 | return web.json_response(json.load(open(current_graph_filepath, "r")))
176 |
177 | @server.PromptServer.instance.routes.post("/cw/save_graph")
178 | async def api_comfyworkflows_save_graph(request):
179 | json_data = await request.json()
180 | current_file_directory = os.path.dirname(os.path.abspath(__file__))
181 | current_graph_filepath = os.path.join(current_file_directory, "current_graph.json")
182 | with open(current_graph_filepath, "w") as f:
183 | json.dump(json_data, f)
184 | return web.Response(status=200)
185 |
186 | @server.PromptServer.instance.routes.post("/cw/reset_load_default_graph")
187 | async def api_comfyworkflows_reset_load_default_graph(request):
188 | current_file_directory = os.path.dirname(os.path.abspath(__file__))
189 | load_default_graph_filepath = os.path.join(current_file_directory, "load_default_graph.txt")
190 | if os.path.exists(load_default_graph_filepath):
191 | os.remove(load_default_graph_filepath)
192 | return web.Response(status=200)
193 |
194 | @server.PromptServer.instance.routes.post("/cw/upload")
195 | async def api_comfyworkflows_upload(request):
196 | global DEPLOY_PROGRESS
197 | print("Uploading workflow...")
198 | json_data = await request.json()
199 |
200 | code = json_data['code']
201 | prompt = json_data['prompt']
202 | filteredNodeTypeToNodeData = json_data['filteredNodeTypeToNodeData']
203 |
204 |     # Base directory to search for files referenced by the workflow
205 | base_directory = folder_paths.base_path #"./"
206 |
207 |     # The prompt is already parsed JSON
208 | parsed_json = prompt
209 |
210 | DEPLOY_PROGRESS = {
211 | "status" : "preparing upload...",
212 | }
213 |
214 | # TODO: For now, we assume that there are no duplicate files with the same name at 2 or more different paths.
215 |
216 | # Extract file names
217 | file_names = set(extract_file_names(parsed_json))
218 | print("File names: ", file_names)
219 |
220 | # Find file paths
221 | file_paths = find_file_paths(base_directory, file_names)
222 | print("File paths: ", file_paths)
223 |
224 | all_file_info = {}
225 | for file_name, file_path in file_paths.items():
226 | file_checksum = get_file_sha256_checksum(file_path)
227 | all_file_info[file_name] = {
228 | 'path': file_path,
229 | 'size': os.path.getsize(file_path),
230 | 'dest_relative_path': os.path.relpath(file_path, base_directory),
231 | 'checksum': file_checksum
232 | }
233 |
234 | extra_folders_to_upload = [
235 | ]
236 | for folder in extra_folders_to_upload:
237 | abs_folder_path = os.path.abspath(folder)
238 | for root, dirs, files in os.walk(abs_folder_path, followlinks=True):
239 | for file in files:
240 | file_path = os.path.join(root, file)
241 | file_checksum = get_file_sha256_checksum(file_path)
242 | all_file_info[file] = {
243 | 'path': file_path,
244 | 'size': os.path.getsize(file_path),
245 | 'dest_relative_path': os.path.relpath(file_path, base_directory),
246 | 'checksum': file_checksum
247 | }
248 |
249 | total_num_chunks = 0
250 | for file_name, file_info in all_file_info.items():
251 | num_chunks = get_num_chunks(file_info['size'])
252 | total_num_chunks += num_chunks
253 |
254 | DEPLOY_PROGRESS = {
255 | "status" : "creating snapshot...",
256 | }
257 |
258 | # Compute snapshot
259 | snapshot_json = get_current_snapshot()
260 | # print("Current snapshot json:")
261 | # print(snapshot_json)
262 |
263 |     raise_for_status = {x for x in range(100, 600)}  # all statuses except 200 (OK) and 429 (rate limit)
264 | raise_for_status.remove(200)
265 | raise_for_status.remove(429)
266 |
267 | pip_packages = []
268 | installed_packages = pkg_resources.working_set
269 | for package in installed_packages:
270 | pip_package = package.__dict__
271 | if "_provider" in pip_package:
272 | del pip_package["_provider"]
273 | pip_packages.append(pip_package)
274 |
275 | # First, create the runnable workflow object
276 | async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
277 | retry_client = RetryClient(session, retry_options=ExponentialRetry(attempts=3), raise_for_status=raise_for_status)
278 |
279 | async with retry_client.post(
280 | f"{CW_ENDPOINT}/api/runnable-workflows/init_runnable_workflow",
281 | json={
282 | "runnable_workflow_key": code,
283 | "num_files" : len(all_file_info),
284 | "workflow_json" : json.dumps(prompt),
285 | "snapshot_json" : json.dumps(snapshot_json),
286 | "filteredNodeTypeToNodeData" : json.dumps(filteredNodeTypeToNodeData),
287 | "pip_packages" : json.dumps(pip_packages) if pip_packages else None,
288 | },
289 | ) as resp:
290 | assert resp.status == 200
291 |
292 | # Now, we upload each file
293 | DEPLOY_PROGRESS = {
294 | "status" : f"uploading files... (0%)",
295 | }
296 | total_num_files = len(all_file_info)
297 | current_file_index = -1
298 | num_chunks_uploaded = 0
299 | for file_name, file_info in all_file_info.items():
300 | # print(f"Going to upload file: {file_name}...")
301 | DEPLOY_PROGRESS = {
302 | "status" : f"uploading files... ({round(100.0 * num_chunks_uploaded / total_num_chunks, 2)}%)",
303 | }
304 |
305 | num_chunks_for_file = get_num_chunks(file_info['size'])
306 | current_file_index += 1
307 | async with retry_client.post(
308 | f"{CW_ENDPOINT}/api/runnable-workflows/get_presigned_url_for_runnable_workflow_file",
309 | json={
310 | "runnable_workflow_key": code,
311 | "dest_relative_path" : file_info['dest_relative_path'],
312 | "sha256_checksum": file_info['checksum'],
313 | 'size': file_info['size'],
314 | },
315 | ) as resp:
316 | assert resp.status == 200
317 | upload_json = await resp.json()
318 |
319 | if upload_json['uploadFile'] == False:
320 | print(f"Skipping file {file_name} because it already exists in the cloud.")
321 | num_chunks_uploaded += num_chunks_for_file
322 | continue
323 |
324 | uploadId = upload_json['uploadId']
325 | presigned_urls = upload_json['signedUrlsList']
326 | objectKey = upload_json['objectKey']
327 |
328 | # print(presigned_url)
329 | # print("Uploading file: {0}".format(file_info['path']))
330 | t = time.time()
331 | # headers = {
332 | # "Content-Length": str(file_info['size']),
333 | # }
334 | # print(headers)
335 | # progress_bar = tqdm(
336 | # desc=f"Uploading {os.path.basename(file_info['path'])}",
337 | # unit="B",
338 | # unit_scale=True,
339 | # total=file_info['size'],
340 | # unit_divisor=1024,
341 | # )
342 |
343 | # with open(file_info['path'], "rb") as f:
344 | # file_data = CallbackBytesIO(progress_bar.update, f.read())
345 |
346 | parts = []
347 |
348 | progress_bar = tqdm(
349 | desc=f"Uploading file ({(current_file_index + 1)}/{total_num_files}) {os.path.basename(file_info['path'])}",
350 | unit="B",
351 | unit_scale=True,
352 | total=file_info['size'],
353 | unit_divisor=1024,
354 | )
355 |
356 | with open(file_info['path'], "rb") as f:
357 | chunk_index = 0
358 | while True:
359 | data = f.read(UPLOAD_CHUNK_SIZE)
360 | if not data:
361 | # print("Finished uploading file. ", chunk_index, UPLOAD_CHUNK_SIZE)
362 | break
363 |
364 | max_retries = 5
365 | num_retries = 0
366 | while num_retries < max_retries:
367 | try:
368 | async with retry_client.put(presigned_urls[chunk_index],data=data) as resp:
369 | assert resp.status == 200
370 | parts.append({
371 | 'ETag': resp.headers['ETag'],
372 | 'PartNumber': chunk_index + 1,
373 | })
374 | break
375 |                         except Exception:
376 | num_retries += 1
377 | # print(f"Failed to upload chunk {chunk_index} of file {file_name} to {presigned_urls[chunk_index]}... retrying ({num_retries}/{max_retries})")
378 | if num_retries == max_retries:
379 | raise Exception(f"Failed to upload file {os.path.basename(file_info['path'])} after {max_retries} retries.")
380 |
381 | progress_bar.update(len(data))
382 |
383 | chunk_index += 1
384 |
385 | num_chunks_uploaded += 1
386 | DEPLOY_PROGRESS = {
387 | "status" : f"uploading files... ({round(100.0 * num_chunks_uploaded / total_num_chunks, 2)}%)",
388 | }
389 |
390 | # Complete the multipart upload for this file
391 | async with retry_client.post(
392 | f"{CW_ENDPOINT}/api/runnable-workflows/complete_multipart_upload_for_runnable_workflow_file",
393 | json={
394 | "parts": parts,
395 | "objectKey": objectKey,
396 | "uploadId": uploadId,
397 | "runnable_workflow_key": code,
398 | },
399 | ) as resp:
400 | assert resp.status == 200
401 | # print("Upload took {0} seconds".format(time.time() - t))
402 |
403 | # One last request to finalize the runnable workflow
404 | async with retry_client.post(
405 | f"{CW_ENDPOINT}/api/runnable-workflows/finalize_runnable_workflow",
406 | json={
407 | "runnable_workflow_key": code,
408 | },
409 | ) as resp:
410 | assert resp.status == 200
411 | resp_json = await resp.json()
412 | workflow_id = resp_json['workflow_id']
413 | version_id = resp_json['version_id']
414 |
415 | workflow_deploy_url = f"{CW_ENDPOINT}/workflows/{workflow_id}?version={version_id}"
416 | DEPLOY_PROGRESS = {}
417 | print("\n\n")
418 |     print("Successfully uploaded workflow:", workflow_deploy_url)
419 |
420 |     # Now, return a JSON response with the workflow's deploy URL
421 | return web.json_response({"deploy_url": workflow_deploy_url})
422 |
423 |
424 | @server.PromptServer.instance.routes.post("/cw/export")
425 | async def api_comfyworkflows_export(request):
426 | global EXPORT_PROGRESS
427 | print("Exporting workflow...")
428 | json_data = await request.json()
429 |
430 | prompt = json_data['prompt']
431 | filteredNodeTypeToNodeData = json_data['filteredNodeTypeToNodeData']
432 |
433 |     # Base directory to search for files referenced by the workflow
434 | base_directory = folder_paths.base_path #"./"
435 |
436 |     # The prompt is already parsed JSON
437 | parsed_json = prompt
438 |
439 | EXPORT_PROGRESS = {
440 | "status" : "preparing export...",
441 | }
442 |
443 | # TODO: For now, we assume that there are no duplicate files with the same name at 2 or more different paths.
444 |
445 | # Extract file names
446 | file_names = set(extract_file_names(parsed_json))
447 | print("File names: ", file_names)
448 |
449 | # Find file paths
450 | file_paths = find_file_paths(base_directory, file_names)
451 | print("File paths: ", file_paths)
452 |
453 | all_file_info = {}
454 | for file_name, file_path in file_paths.items():
455 | file_checksum = get_file_sha256_checksum(file_path)
456 | all_file_info[file_name] = {
457 | 'path': file_path,
458 | 'size': os.path.getsize(file_path),
459 | 'dest_relative_path': os.path.relpath(file_path, base_directory),
460 | 'checksum': file_checksum
461 | }
462 |
463 | extra_folders_to_upload = [
464 | ]
465 | for folder in extra_folders_to_upload:
466 | abs_folder_path = os.path.abspath(folder)
467 | for root, dirs, files in os.walk(abs_folder_path, followlinks=True):
468 | for file in files:
469 | file_path = os.path.join(root, file)
470 | file_checksum = get_file_sha256_checksum(file_path)
471 | all_file_info[file] = {
472 | 'path': file_path,
473 | 'size': os.path.getsize(file_path),
474 | 'dest_relative_path': os.path.relpath(file_path, base_directory),
475 | 'checksum': file_checksum
476 | }
477 |
478 | total_num_chunks = 0
479 | for file_name, file_info in all_file_info.items():
480 | num_chunks = get_num_chunks(file_info['size'])
481 | total_num_chunks += num_chunks
482 |
483 | EXPORT_PROGRESS = {
484 | "status" : "creating snapshot...",
485 | }
486 |
487 | # Compute snapshot
488 | # TODO: Support non-public custom nodes
489 | snapshot_json = get_current_snapshot()
490 |
491 | raise_for_status = {x for x in range(100, 600)}
492 | raise_for_status.remove(200)
493 | raise_for_status.remove(429)
494 |
495 | pip_packages = []
496 | installed_packages = pkg_resources.working_set
497 | for package in installed_packages:
498 | pip_package = package.__dict__
499 | if "_provider" in pip_package:
500 | del pip_package["_provider"]
501 | if "location" in pip_package:
502 | del pip_package["location"]
503 | pip_packages.append(pip_package)
504 |
505 | files_data = []
506 |
507 | # First, create the runnable workflow object
508 | async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
509 | retry_client = RetryClient(session, retry_options=ExponentialRetry(attempts=3), raise_for_status=raise_for_status)
510 | # Now, we upload each file
511 | EXPORT_PROGRESS = {
512 | "status" : f"uploading files... (0%)",
513 | }
514 | total_num_files = len(all_file_info)
515 | current_file_index = -1
516 | num_chunks_uploaded = 0
517 | for file_name, file_info in all_file_info.items():
518 | # print(f"Going to upload file: {file_name}...")
519 | EXPORT_PROGRESS = {
520 | "status" : f"uploading files... ({round(100.0 * num_chunks_uploaded / total_num_chunks, 2)}%)",
521 | }
522 |
523 | num_chunks_for_file = get_num_chunks(file_info['size'])
524 | current_file_index += 1
525 | async with retry_client.post(
526 | f"{CW_ENDPOINT}/api/comfyui-launcher/get_presigned_url_for_launcher_export_file",
527 | json={
528 | "dest_relative_path" : file_info['dest_relative_path'],
529 | "sha256_checksum": file_info['checksum'],
530 | 'size': file_info['size'],
531 | },
532 | ) as resp:
533 | assert resp.status == 200
534 | upload_json = await resp.json()
535 |
536 | if upload_json['uploadFile'] == False:
537 | file_url = upload_json['file_url']
538 |
539 | print(f"Skipping file {file_name} because it already exists in the cloud.")
540 | num_chunks_uploaded += num_chunks_for_file
541 |
542 | files_data.append([{
543 | "download_url" : file_url,
544 | "dest_relative_path" : file_info['dest_relative_path'],
545 | "sha256_checksum" : file_info['checksum'],
546 | "size" : file_info['size']
547 | }])
548 | continue
549 |
550 | launcher_file_id = upload_json['launcher_file_id']
551 | uploadId = upload_json['uploadId']
552 | presigned_urls = upload_json['signedUrlsList']
553 | objectKey = upload_json['objectKey']
554 |
555 | t = time.time()
556 |
557 | parts = []
558 |
559 | progress_bar = tqdm(
560 | desc=f"Uploading file ({(current_file_index + 1)}/{total_num_files}) {os.path.basename(file_info['path'])}",
561 | unit="B",
562 | unit_scale=True,
563 | total=file_info['size'],
564 | unit_divisor=1024,
565 | )
566 |
567 | with open(file_info['path'], "rb") as f:
568 | chunk_index = 0
569 | while True:
570 | data = f.read(UPLOAD_CHUNK_SIZE)
571 | if not data:
572 | break
573 |
574 | max_retries = 5
575 | num_retries = 0
576 | while num_retries < max_retries:
577 | try:
578 | async with retry_client.put(presigned_urls[chunk_index],data=data) as resp:
579 | assert resp.status == 200
580 | parts.append({
581 | 'ETag': resp.headers['ETag'],
582 | 'PartNumber': chunk_index + 1,
583 | })
584 | break
585 |                         except Exception:
586 | num_retries += 1
587 | # print(f"Failed to upload chunk {chunk_index} of file {file_name} to {presigned_urls[chunk_index]}... retrying ({num_retries}/{max_retries})")
588 | if num_retries == max_retries:
589 | raise Exception(f"Failed to upload file {os.path.basename(file_info['path'])} after {max_retries} retries.")
590 |
591 | progress_bar.update(len(data))
592 |
593 | chunk_index += 1
594 |
595 | num_chunks_uploaded += 1
596 | EXPORT_PROGRESS = {
597 | "status" : f"uploading files... ({round(100.0 * num_chunks_uploaded / total_num_chunks, 2)}%)",
598 | }
599 |
600 | # Complete the multipart upload for this file
601 | async with retry_client.post(
602 | f"{CW_ENDPOINT}/api/comfyui-launcher/complete_multipart_upload_for_launcher_file",
603 | json={
604 | "parts": parts,
605 | "objectKey": objectKey,
606 | "uploadId": uploadId,
607 | "launcher_file_id" : launcher_file_id
608 | },
609 | ) as resp:
610 | assert resp.status == 200
611 | resp_json = await resp.json()
612 | file_url = resp_json['file_url']
613 | # print("Upload took {0} seconds".format(time.time() - t))
614 |
615 | files_data.append([{
616 | "download_url" : file_url,
617 | "dest_relative_path" : file_info['dest_relative_path'],
618 | "sha256_checksum" : file_info['checksum'],
619 | "size" : file_info['size']
620 | }])
621 |
622 | export_json = generate_export_json_file(
623 | workflow_json=parsed_json,
624 | snapshot_json=snapshot_json,
625 | files_data=files_data,
626 | pip_reqs=pip_packages,
627 | os_type={
628 | "name" : os.name,
629 | "platform" : platform.system()
630 | },
631 | python_version={
632 |             "version" : sys.version,
633 | "version_info" : {
634 | "major": sys.version_info.major,
635 | "minor": sys.version_info.minor,
636 | "micro": sys.version_info.micro,
637 | "releaselevel": sys.version_info.releaselevel,
638 | "serial": sys.version_info.serial,
639 | }
640 | }
641 | )
642 |
643 | EXPORT_PROGRESS = {}
644 | print("\n\n")
645 |     print("Successfully exported workflow.")
646 |
647 |     # Finally, return the export document as the JSON response
648 | return web.json_response(export_json)
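649 |
650 | # Worked example of the chunking math above (illustrative numbers): with
651 | # UPLOAD_CHUNK_SIZE = 100_000_000, a 250 MB file gives
652 | # get_num_chunks(250_000_000) == 3 (two full 100 MB parts plus a 50 MB tail).
653 | # Each part is PUT to its own presigned URL and recorded with its ETag and
654 | # 1-based PartNumber, after which the multipart upload is completed server-side.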
--------------------------------------------------------------------------------