├── 5090_FLUX-dev-fp8_workflow.json ├── FLUX_Controlnet_canny_GGUF_workflow.json ├── FLUX_controlnet_depth_GGUF.json ├── Flux_Ghibli_i2i_Ollama.json ├── Flux_lora_workflow.json ├── HiDream_dev_28steps.json ├── HiDream_fast_16steps.json ├── HunyuanVideo_12GB_10GB_Workflow.json ├── Hunyuan_video_macbook.json ├── LTX_Video_t2v.json ├── Mochi_text_2_video.json ├── Pulid_flux_workflow.json ├── README.md ├── SD3.5-large-workflow.json ├── SD3.5-turbo-large-workflow.json ├── SD35_Large_lora.json ├── TeaCache_Flux_dev.json ├── TeaCache_HunyuanVideo_T2V.json ├── controlnet_workflows_example.jpg ├── flux1-dev-bnb-nf4-workflow.json ├── flux1-dev-gguf-Q4-workflow.json ├── flux1-schnell-bnb-nf4-workflow.json ├── flux1-schnell-gguf-Q4-workflow.json ├── flux_dev_checkpoint_example.png ├── flux_schnell_checkpoint_fp8.json ├── gguf_macbook_tech_practice_workflow.json ├── hunyuanVideo_lora_workflow.json ├── hunyuanvideo_t2v.json ├── hunyuanvideo_v2v.json ├── wan2.1_IMG2Video.json ├── wan2.1_Macbook_text2video_1.3B.json ├── wan2.1_text2video_1.3B.json ├── workflow_ad_sd15_lcm_lora.json └── workflow_sdxl_lcm_lora.json /FLUX_controlnet_depth_GGUF.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 25, 3 | "last_link_id": 42, 4 | "nodes": [ 5 | { 6 | "id": 3, 7 | "type": "XlabsSampler", 8 | "pos": [ 9 | 1007, 10 | 170 11 | ], 12 | "size": { 13 | "0": 342.5999755859375, 14 | "1": 282 15 | }, 16 | "flags": {}, 17 | "order": 12, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "model", 22 | "type": "MODEL", 23 | "link": 42, 24 | "slot_index": 0 25 | }, 26 | { 27 | "name": "conditioning", 28 | "type": "CONDITIONING", 29 | "link": 18 30 | }, 31 | { 32 | "name": "neg_conditioning", 33 | "type": "CONDITIONING", 34 | "link": 26 35 | }, 36 | { 37 | "name": "latent_image", 38 | "type": "LATENT", 39 | "link": 5 40 | }, 41 | { 42 | "name": "controlnet_condition", 43 | "type": "ControlNetCondition", 44 | "link": 28 45 | } 46 | ], 47 | "outputs": [ 48 | { 49 | "name": "latent", 50 | "type": "LATENT", 51 | "links": [ 52 | 6 53 | ], 54 | "shape": 3 55 | } 56 | ], 57 | "properties": { 58 | "Node name for S&R": "XlabsSampler" 59 | }, 60 | "widgets_values": [ 61 | 257762932021984, 62 | "increment", 63 | 25, 64 | 1, 65 | 3.5, 66 | 0, 67 | 1 68 | ] 69 | }, 70 | { 71 | "id": 4, 72 | "type": "DualCLIPLoader", 73 | "pos": [ 74 | -157, 75 | 198 76 | ], 77 | "size": { 78 | "0": 315, 79 | "1": 106 80 | }, 81 | "flags": {}, 82 | "order": 2, 83 | "mode": 0, 84 | "outputs": [ 85 | { 86 | "name": "CLIP", 87 | "type": "CLIP", 88 | "links": [ 89 | 2, 90 | 27 91 | ], 92 | "slot_index": 0, 93 | "shape": 3 94 | } 95 | ], 96 | "properties": { 97 | "Node name for S&R": "DualCLIPLoader" 98 | }, 99 | "widgets_values": [ 100 | "clip_l.safetensors", 101 | "t5xxl_fp8_e4m3fn.safetensors", 102 | "flux" 103 | ] 104 | }, 105 | { 106 | "id": 5, 107 | "type": "CLIPTextEncodeFlux", 108 | "pos": [ 109 | 518, 110 | -63 111 | ], 112 | "size": { 113 | "0": 400, 114 | "1": 200 115 | }, 116 | "flags": {}, 117 | "order": 8, 118 | "mode": 0, 119 | "inputs": [ 120 | { 121 | "name": "clip", 122 | "type": "CLIP", 123 | "link": 2, 124 | "slot_index": 0 125 | } 126 | ], 127 | "outputs": [ 128 | { 129 | "name": "CONDITIONING", 130 | "type": "CONDITIONING", 131 | "links": [ 132 | 18 133 | ], 134 | "slot_index": 0, 135 | "shape": 3 136 | } 137 | ], 138 | "properties": { 139 | "Node name for S&R": "CLIPTextEncodeFlux" 140 | }, 141 | "widgets_values": [ 142 | "man with microphone in the office, anime", 143 | "man 
with microphone in the office, anime", 144 | 4 145 | ] 146 | }, 147 | { 148 | "id": 6, 149 | "type": "EmptyLatentImage", 150 | "pos": [ 151 | 769, 152 | 430 153 | ], 154 | "size": { 155 | "0": 315, 156 | "1": 106 157 | }, 158 | "flags": {}, 159 | "order": 0, 160 | "mode": 0, 161 | "outputs": [ 162 | { 163 | "name": "LATENT", 164 | "type": "LATENT", 165 | "links": [ 166 | 5 167 | ], 168 | "slot_index": 0, 169 | "shape": 3 170 | } 171 | ], 172 | "properties": { 173 | "Node name for S&R": "EmptyLatentImage" 174 | }, 175 | "widgets_values": [ 176 | 1024, 177 | 1024, 178 | 1 179 | ] 180 | }, 181 | { 182 | "id": 7, 183 | "type": "VAEDecode", 184 | "pos": [ 185 | 1371, 186 | 152 187 | ], 188 | "size": { 189 | "0": 210, 190 | "1": 46 191 | }, 192 | "flags": {}, 193 | "order": 13, 194 | "mode": 0, 195 | "inputs": [ 196 | { 197 | "name": "samples", 198 | "type": "LATENT", 199 | "link": 6, 200 | "slot_index": 0 201 | }, 202 | { 203 | "name": "vae", 204 | "type": "VAE", 205 | "link": 7 206 | } 207 | ], 208 | "outputs": [ 209 | { 210 | "name": "IMAGE", 211 | "type": "IMAGE", 212 | "links": [ 213 | 31, 214 | 41 215 | ], 216 | "slot_index": 0, 217 | "shape": 3 218 | } 219 | ], 220 | "properties": { 221 | "Node name for S&R": "VAEDecode" 222 | } 223 | }, 224 | { 225 | "id": 8, 226 | "type": "VAELoader", 227 | "pos": [ 228 | 1130, 229 | 0 230 | ], 231 | "size": { 232 | "0": 315, 233 | "1": 58 234 | }, 235 | "flags": {}, 236 | "order": 3, 237 | "mode": 0, 238 | "outputs": [ 239 | { 240 | "name": "VAE", 241 | "type": "VAE", 242 | "links": [ 243 | 7 244 | ], 245 | "slot_index": 0, 246 | "shape": 3 247 | } 248 | ], 249 | "properties": { 250 | "Node name for S&R": "VAELoader" 251 | }, 252 | "widgets_values": [ 253 | "ae.safetensors" 254 | ] 255 | }, 256 | { 257 | "id": 10, 258 | "type": "UNETLoader", 259 | "pos": [ 260 | 100, 261 | 594 262 | ], 263 | "size": { 264 | "0": 315, 265 | "1": 82 266 | }, 267 | "flags": {}, 268 | "order": 5, 269 | "mode": 0, 270 | "outputs": [ 271 | { 272 | "name": "MODEL", 273 | "type": "MODEL", 274 | "links": [], 275 | "slot_index": 0, 276 | "shape": 3 277 | } 278 | ], 279 | "properties": { 280 | "Node name for S&R": "UNETLoader" 281 | }, 282 | "widgets_values": [ 283 | "flux1-dev-fp8.safetensors", 284 | "fp8_e4m3fn" 285 | ] 286 | }, 287 | { 288 | "id": 13, 289 | "type": "LoadFluxControlNet", 290 | "pos": [ 291 | 4, 292 | -226 293 | ], 294 | "size": { 295 | "0": 315, 296 | "1": 82 297 | }, 298 | "flags": {}, 299 | "order": 4, 300 | "mode": 0, 301 | "outputs": [ 302 | { 303 | "name": "ControlNet", 304 | "type": "FluxControlNet", 305 | "links": [ 306 | 19 307 | ], 308 | "slot_index": 0, 309 | "shape": 3 310 | } 311 | ], 312 | "properties": { 313 | "Node name for S&R": "LoadFluxControlNet" 314 | }, 315 | "widgets_values": [ 316 | "flux-dev", 317 | "flux-depth-controlnet-v3.safetensors" 318 | ] 319 | }, 320 | { 321 | "id": 14, 322 | "type": "ApplyFluxControlNet", 323 | "pos": [ 324 | 546, 325 | -262 326 | ], 327 | "size": { 328 | "0": 393, 329 | "1": 98 330 | }, 331 | "flags": {}, 332 | "order": 11, 333 | "mode": 0, 334 | "inputs": [ 335 | { 336 | "name": "controlnet", 337 | "type": "FluxControlNet", 338 | "link": 19 339 | }, 340 | { 341 | "name": "image", 342 | "type": "IMAGE", 343 | "link": 40, 344 | "slot_index": 1 345 | }, 346 | { 347 | "name": "controlnet_condition", 348 | "type": "ControlNetCondition", 349 | "link": null 350 | } 351 | ], 352 | "outputs": [ 353 | { 354 | "name": "controlnet_condition", 355 | "type": "ControlNetCondition", 356 | "links": [ 357 | 28 358 | ], 359 | 
"slot_index": 0, 360 | "shape": 3 361 | } 362 | ], 363 | "properties": { 364 | "Node name for S&R": "ApplyFluxControlNet" 365 | }, 366 | "widgets_values": [ 367 | 0.86 368 | ] 369 | }, 370 | { 371 | "id": 16, 372 | "type": "LoadImage", 373 | "pos": [ 374 | -378, 375 | -239 376 | ], 377 | "size": { 378 | "0": 315, 379 | "1": 314 380 | }, 381 | "flags": {}, 382 | "order": 1, 383 | "mode": 0, 384 | "outputs": [ 385 | { 386 | "name": "IMAGE", 387 | "type": "IMAGE", 388 | "links": [ 389 | 37 390 | ], 391 | "slot_index": 0, 392 | "shape": 3 393 | }, 394 | { 395 | "name": "MASK", 396 | "type": "MASK", 397 | "links": null, 398 | "shape": 3 399 | } 400 | ], 401 | "properties": { 402 | "Node name for S&R": "LoadImage" 403 | }, 404 | "widgets_values": [ 405 | "Snipaste_2024-09-18_16-12-16.png", 406 | "image" 407 | ] 408 | }, 409 | { 410 | "id": 17, 411 | "type": "PreviewImage", 412 | "pos": [ 413 | 330, 414 | 20 415 | ], 416 | "size": { 417 | "0": 210, 418 | "1": 246 419 | }, 420 | "flags": {}, 421 | "order": 10, 422 | "mode": 0, 423 | "inputs": [ 424 | { 425 | "name": "images", 426 | "type": "IMAGE", 427 | "link": 38, 428 | "slot_index": 0 429 | } 430 | ], 431 | "properties": { 432 | "Node name for S&R": "PreviewImage" 433 | } 434 | }, 435 | { 436 | "id": 19, 437 | "type": "CLIPTextEncodeFlux", 438 | "pos": [ 439 | 65, 440 | 281 441 | ], 442 | "size": { 443 | "0": 400, 444 | "1": 200 445 | }, 446 | "flags": {}, 447 | "order": 9, 448 | "mode": 0, 449 | "inputs": [ 450 | { 451 | "name": "clip", 452 | "type": "CLIP", 453 | "link": 27, 454 | "slot_index": 0 455 | } 456 | ], 457 | "outputs": [ 458 | { 459 | "name": "CONDITIONING", 460 | "type": "CONDITIONING", 461 | "links": [ 462 | 26 463 | ], 464 | "slot_index": 0, 465 | "shape": 3 466 | } 467 | ], 468 | "properties": { 469 | "Node name for S&R": "CLIPTextEncodeFlux" 470 | }, 471 | "widgets_values": [ 472 | "", 473 | "", 474 | 4 475 | ] 476 | }, 477 | { 478 | "id": 21, 479 | "type": "PreviewImage", 480 | "pos": [ 481 | 1670, 482 | 124 483 | ], 484 | "size": { 485 | "0": 210, 486 | "1": 246 487 | }, 488 | "flags": {}, 489 | "order": 14, 490 | "mode": 0, 491 | "inputs": [ 492 | { 493 | "name": "images", 494 | "type": "IMAGE", 495 | "link": 31, 496 | "slot_index": 0 497 | } 498 | ], 499 | "properties": { 500 | "Node name for S&R": "PreviewImage" 501 | } 502 | }, 503 | { 504 | "id": 23, 505 | "type": "MiDaS-DepthMapPreprocessor", 506 | "pos": [ 507 | -27, 508 | -50 509 | ], 510 | "size": { 511 | "0": 315, 512 | "1": 106 513 | }, 514 | "flags": {}, 515 | "order": 7, 516 | "mode": 0, 517 | "inputs": [ 518 | { 519 | "name": "image", 520 | "type": "IMAGE", 521 | "link": 37 522 | } 523 | ], 524 | "outputs": [ 525 | { 526 | "name": "IMAGE", 527 | "type": "IMAGE", 528 | "links": [ 529 | 38, 530 | 40 531 | ], 532 | "slot_index": 0, 533 | "shape": 3 534 | } 535 | ], 536 | "properties": { 537 | "Node name for S&R": "MiDaS-DepthMapPreprocessor" 538 | }, 539 | "widgets_values": [ 540 | 6.283185307179586, 541 | 0.1, 542 | 1024 543 | ] 544 | }, 545 | { 546 | "id": 24, 547 | "type": "SaveImage", 548 | "pos": [ 549 | 1487, 550 | 413 551 | ], 552 | "size": { 553 | "0": 315, 554 | "1": 270 555 | }, 556 | "flags": {}, 557 | "order": 15, 558 | "mode": 0, 559 | "inputs": [ 560 | { 561 | "name": "images", 562 | "type": "IMAGE", 563 | "link": 41 564 | } 565 | ], 566 | "properties": {}, 567 | "widgets_values": [ 568 | "depth_res" 569 | ] 570 | }, 571 | { 572 | "id": 25, 573 | "type": "UnetLoaderGGUF", 574 | "pos": [ 575 | 465, 576 | 613 577 | ], 578 | "size": { 579 | "0": 315, 
580 | "1": 58 581 | }, 582 | "flags": {}, 583 | "order": 6, 584 | "mode": 0, 585 | "outputs": [ 586 | { 587 | "name": "MODEL", 588 | "type": "MODEL", 589 | "links": [ 590 | 42 591 | ], 592 | "shape": 3, 593 | "slot_index": 0 594 | } 595 | ], 596 | "properties": { 597 | "Node name for S&R": "UnetLoaderGGUF" 598 | }, 599 | "widgets_values": [ 600 | "flux1-dev-Q4_0.gguf" 601 | ] 602 | } 603 | ], 604 | "links": [ 605 | [ 606 | 2, 607 | 4, 608 | 0, 609 | 5, 610 | 0, 611 | "CLIP" 612 | ], 613 | [ 614 | 5, 615 | 6, 616 | 0, 617 | 3, 618 | 3, 619 | "LATENT" 620 | ], 621 | [ 622 | 6, 623 | 3, 624 | 0, 625 | 7, 626 | 0, 627 | "LATENT" 628 | ], 629 | [ 630 | 7, 631 | 8, 632 | 0, 633 | 7, 634 | 1, 635 | "VAE" 636 | ], 637 | [ 638 | 18, 639 | 5, 640 | 0, 641 | 3, 642 | 1, 643 | "CONDITIONING" 644 | ], 645 | [ 646 | 19, 647 | 13, 648 | 0, 649 | 14, 650 | 0, 651 | "FluxControlNet" 652 | ], 653 | [ 654 | 26, 655 | 19, 656 | 0, 657 | 3, 658 | 2, 659 | "CONDITIONING" 660 | ], 661 | [ 662 | 27, 663 | 4, 664 | 0, 665 | 19, 666 | 0, 667 | "CLIP" 668 | ], 669 | [ 670 | 28, 671 | 14, 672 | 0, 673 | 3, 674 | 4, 675 | "ControlNetCondition" 676 | ], 677 | [ 678 | 31, 679 | 7, 680 | 0, 681 | 21, 682 | 0, 683 | "IMAGE" 684 | ], 685 | [ 686 | 37, 687 | 16, 688 | 0, 689 | 23, 690 | 0, 691 | "IMAGE" 692 | ], 693 | [ 694 | 38, 695 | 23, 696 | 0, 697 | 17, 698 | 0, 699 | "IMAGE" 700 | ], 701 | [ 702 | 40, 703 | 23, 704 | 0, 705 | 14, 706 | 1, 707 | "IMAGE" 708 | ], 709 | [ 710 | 41, 711 | 7, 712 | 0, 713 | 24, 714 | 0, 715 | "IMAGE" 716 | ], 717 | [ 718 | 42, 719 | 25, 720 | 0, 721 | 3, 722 | 0, 723 | "MODEL" 724 | ] 725 | ], 726 | "groups": [], 727 | "config": {}, 728 | "extra": { 729 | "ds": { 730 | "scale": 0.672749994932562, 731 | "offset": [ 732 | 340.9553893352698, 733 | 410.2753781823307 734 | ] 735 | } 736 | }, 737 | "version": 0.4 738 | } -------------------------------------------------------------------------------- /Flux_lora_workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 38, 3 | "last_link_id": 61, 4 | "nodes": [ 5 | { 6 | "id": 8, 7 | "type": "VAEDecode", 8 | "pos": { 9 | "0": 1151, 10 | "1": 195, 11 | "2": 0, 12 | "3": 0, 13 | "4": 0, 14 | "5": 0, 15 | "6": 0, 16 | "7": 0, 17 | "8": 0, 18 | "9": 0 19 | }, 20 | "size": { 21 | "0": 210, 22 | "1": 46 23 | }, 24 | "flags": {}, 25 | "order": 9, 26 | "mode": 0, 27 | "inputs": [ 28 | { 29 | "name": "samples", 30 | "type": "LATENT", 31 | "link": 52 32 | }, 33 | { 34 | "name": "vae", 35 | "type": "VAE", 36 | "link": 46 37 | } 38 | ], 39 | "outputs": [ 40 | { 41 | "name": "IMAGE", 42 | "type": "IMAGE", 43 | "links": [ 44 | 9 45 | ], 46 | "slot_index": 0 47 | } 48 | ], 49 | "properties": { 50 | "Node name for S&R": "VAEDecode" 51 | } 52 | }, 53 | { 54 | "id": 27, 55 | "type": "EmptySD3LatentImage", 56 | "pos": { 57 | "0": 471, 58 | "1": 455, 59 | "2": 0, 60 | "3": 0, 61 | "4": 0, 62 | "5": 0, 63 | "6": 0, 64 | "7": 0, 65 | "8": 0, 66 | "9": 0 67 | }, 68 | "size": { 69 | "0": 315, 70 | "1": 106 71 | }, 72 | "flags": {}, 73 | "order": 0, 74 | "mode": 0, 75 | "inputs": [], 76 | "outputs": [ 77 | { 78 | "name": "LATENT", 79 | "type": "LATENT", 80 | "links": [ 81 | 51 82 | ], 83 | "slot_index": 0, 84 | "shape": 3 85 | } 86 | ], 87 | "properties": { 88 | "Node name for S&R": "EmptySD3LatentImage" 89 | }, 90 | "widgets_values": [ 91 | 1024, 92 | 1024, 93 | 1 94 | ], 95 | "color": "#323", 96 | "bgcolor": "#535" 97 | }, 98 | { 99 | "id": 33, 100 | "type": "CLIPTextEncode", 101 | "pos": { 102 | "0": 390, 
103 | "1": 400, 104 | "2": 0, 105 | "3": 0, 106 | "4": 0, 107 | "5": 0, 108 | "6": 0, 109 | "7": 0, 110 | "8": 0, 111 | "9": 0 112 | }, 113 | "size": { 114 | "0": 422.84503173828125, 115 | "1": 164.31304931640625 116 | }, 117 | "flags": { 118 | "collapsed": true 119 | }, 120 | "order": 4, 121 | "mode": 0, 122 | "inputs": [ 123 | { 124 | "name": "clip", 125 | "type": "CLIP", 126 | "link": 54, 127 | "slot_index": 0 128 | } 129 | ], 130 | "outputs": [ 131 | { 132 | "name": "CONDITIONING", 133 | "type": "CONDITIONING", 134 | "links": [ 135 | 55 136 | ], 137 | "slot_index": 0 138 | } 139 | ], 140 | "title": "CLIP Text Encode (Negative Prompt)", 141 | "properties": { 142 | "Node name for S&R": "CLIPTextEncode" 143 | }, 144 | "widgets_values": [ 145 | "" 146 | ], 147 | "color": "#322", 148 | "bgcolor": "#533" 149 | }, 150 | { 151 | "id": 31, 152 | "type": "KSampler", 153 | "pos": { 154 | "0": 816, 155 | "1": 192, 156 | "2": 0, 157 | "3": 0, 158 | "4": 0, 159 | "5": 0, 160 | "6": 0, 161 | "7": 0, 162 | "8": 0, 163 | "9": 0 164 | }, 165 | "size": { 166 | "0": 315, 167 | "1": 262 168 | }, 169 | "flags": {}, 170 | "order": 8, 171 | "mode": 0, 172 | "inputs": [ 173 | { 174 | "name": "model", 175 | "type": "MODEL", 176 | "link": 61 177 | }, 178 | { 179 | "name": "positive", 180 | "type": "CONDITIONING", 181 | "link": 57 182 | }, 183 | { 184 | "name": "negative", 185 | "type": "CONDITIONING", 186 | "link": 55 187 | }, 188 | { 189 | "name": "latent_image", 190 | "type": "LATENT", 191 | "link": 51 192 | } 193 | ], 194 | "outputs": [ 195 | { 196 | "name": "LATENT", 197 | "type": "LATENT", 198 | "links": [ 199 | 52 200 | ], 201 | "slot_index": 0, 202 | "shape": 3 203 | } 204 | ], 205 | "properties": { 206 | "Node name for S&R": "KSampler" 207 | }, 208 | "widgets_values": [ 209 | 972054013131422, 210 | "increment", 211 | 20, 212 | 1, 213 | "euler", 214 | "simple", 215 | 1 216 | ] 217 | }, 218 | { 219 | "id": 9, 220 | "type": "SaveImage", 221 | "pos": { 222 | "0": 1157, 223 | "1": 307, 224 | "2": 0, 225 | "3": 0, 226 | "4": 0, 227 | "5": 0, 228 | "6": 0, 229 | "7": 0, 230 | "8": 0, 231 | "9": 0 232 | }, 233 | "size": { 234 | "0": 518.797119140625, 235 | "1": 546.2318725585938 236 | }, 237 | "flags": {}, 238 | "order": 10, 239 | "mode": 0, 240 | "inputs": [ 241 | { 242 | "name": "images", 243 | "type": "IMAGE", 244 | "link": 9 245 | } 246 | ], 247 | "outputs": [], 248 | "properties": {}, 249 | "widgets_values": [ 250 | "ComfyUI" 251 | ] 252 | }, 253 | { 254 | "id": 37, 255 | "type": "LoraLoader", 256 | "pos": { 257 | "0": 289, 258 | "1": 623, 259 | "2": 0, 260 | "3": 0, 261 | "4": 0, 262 | "5": 0, 263 | "6": 0, 264 | "7": 0, 265 | "8": 0, 266 | "9": 0 267 | }, 268 | "size": { 269 | "0": 315, 270 | "1": 126 271 | }, 272 | "flags": {}, 273 | "order": 5, 274 | "mode": 0, 275 | "inputs": [ 276 | { 277 | "name": "model", 278 | "type": "MODEL", 279 | "link": 60 280 | }, 281 | { 282 | "name": "clip", 283 | "type": "CLIP", 284 | "link": 58 285 | } 286 | ], 287 | "outputs": [ 288 | { 289 | "name": "MODEL", 290 | "type": "MODEL", 291 | "links": [ 292 | 61 293 | ], 294 | "slot_index": 0, 295 | "shape": 3 296 | }, 297 | { 298 | "name": "CLIP", 299 | "type": "CLIP", 300 | "links": [ 301 | 59 302 | ], 303 | "slot_index": 1, 304 | "shape": 3 305 | } 306 | ], 307 | "properties": { 308 | "Node name for S&R": "LoraLoader" 309 | }, 310 | "widgets_values": [ 311 | "scenery_lora_comfy_converted.safetensors", 312 | 0.6, 313 | 0.6 314 | ] 315 | }, 316 | { 317 | "id": 6, 318 | "type": "CLIPTextEncode", 319 | "pos": { 320 | "0": 384, 
321 | "1": 192, 322 | "2": 0, 323 | "3": 0, 324 | "4": 0, 325 | "5": 0, 326 | "6": 0, 327 | "7": 0, 328 | "8": 0, 329 | "9": 0 330 | }, 331 | "size": { 332 | "0": 422.84503173828125, 333 | "1": 164.31304931640625 334 | }, 335 | "flags": {}, 336 | "order": 6, 337 | "mode": 0, 338 | "inputs": [ 339 | { 340 | "name": "clip", 341 | "type": "CLIP", 342 | "link": 59 343 | } 344 | ], 345 | "outputs": [ 346 | { 347 | "name": "CONDITIONING", 348 | "type": "CONDITIONING", 349 | "links": [ 350 | 56 351 | ], 352 | "slot_index": 0 353 | } 354 | ], 355 | "title": "CLIP Text Encode (Positive Prompt)", 356 | "properties": { 357 | "Node name for S&R": "CLIPTextEncode" 358 | }, 359 | "widgets_values": [ 360 | "A mystical forest at twilight, with ancient trees looming like sentinels from the shadows. Their gnarled branches stretch up towards the sky, where the first stars are beginning to twinkle like diamonds. In the underbrush, fireflies flicker on and off, casting a magical glow over the scene. A faint mist rises from the forest floor, imbuing the atmosphere with an air of enchantment and mystery, scenery style." 361 | ], 362 | "color": "#232", 363 | "bgcolor": "#353" 364 | }, 365 | { 366 | "id": 34, 367 | "type": "Note", 368 | "pos": { 369 | "0": 831, 370 | "1": 501, 371 | "2": 0, 372 | "3": 0, 373 | "4": 0, 374 | "5": 0, 375 | "6": 0, 376 | "7": 0, 377 | "8": 0, 378 | "9": 0 379 | }, 380 | "size": { 381 | "0": 282.8617858886719, 382 | "1": 164.08004760742188 383 | }, 384 | "flags": {}, 385 | "order": 1, 386 | "mode": 0, 387 | "inputs": [], 388 | "outputs": [], 389 | "properties": { 390 | "text": "" 391 | }, 392 | "widgets_values": [ 393 | "Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored." 
394 | ], 395 | "color": "#432", 396 | "bgcolor": "#653" 397 | }, 398 | { 399 | "id": 30, 400 | "type": "CheckpointLoaderSimple", 401 | "pos": { 402 | "0": -5, 403 | "1": 299, 404 | "2": 0, 405 | "3": 0, 406 | "4": 0, 407 | "5": 0, 408 | "6": 0, 409 | "7": 0, 410 | "8": 0, 411 | "9": 0 412 | }, 413 | "size": { 414 | "0": 315, 415 | "1": 98 416 | }, 417 | "flags": {}, 418 | "order": 2, 419 | "mode": 0, 420 | "inputs": [], 421 | "outputs": [ 422 | { 423 | "name": "MODEL", 424 | "type": "MODEL", 425 | "links": [ 426 | 60 427 | ], 428 | "slot_index": 0, 429 | "shape": 3 430 | }, 431 | { 432 | "name": "CLIP", 433 | "type": "CLIP", 434 | "links": [ 435 | 54, 436 | 58 437 | ], 438 | "slot_index": 1, 439 | "shape": 3 440 | }, 441 | { 442 | "name": "VAE", 443 | "type": "VAE", 444 | "links": [ 445 | 46 446 | ], 447 | "slot_index": 2, 448 | "shape": 3 449 | } 450 | ], 451 | "properties": { 452 | "Node name for S&R": "CheckpointLoaderSimple" 453 | }, 454 | "widgets_values": [ 455 | "flux1-dev-fp8.safetensors" 456 | ] 457 | }, 458 | { 459 | "id": 35, 460 | "type": "FluxGuidance", 461 | "pos": { 462 | "0": 583, 463 | "1": 60, 464 | "2": 0, 465 | "3": 0, 466 | "4": 0, 467 | "5": 0, 468 | "6": 0, 469 | "7": 0, 470 | "8": 0, 471 | "9": 0 472 | }, 473 | "size": { 474 | "0": 211.60000610351562, 475 | "1": 58 476 | }, 477 | "flags": {}, 478 | "order": 7, 479 | "mode": 0, 480 | "inputs": [ 481 | { 482 | "name": "conditioning", 483 | "type": "CONDITIONING", 484 | "link": 56 485 | } 486 | ], 487 | "outputs": [ 488 | { 489 | "name": "CONDITIONING", 490 | "type": "CONDITIONING", 491 | "links": [ 492 | 57 493 | ], 494 | "slot_index": 0, 495 | "shape": 3 496 | } 497 | ], 498 | "properties": { 499 | "Node name for S&R": "FluxGuidance" 500 | }, 501 | "widgets_values": [ 502 | 3.5 503 | ] 504 | }, 505 | { 506 | "id": 38, 507 | "type": "Note", 508 | "pos": { 509 | "0": 644, 510 | "1": 732, 511 | "2": 0, 512 | "3": 0, 513 | "4": 0, 514 | "5": 0, 515 | "6": 0, 516 | "7": 0, 517 | "8": 0, 518 | "9": 0 519 | }, 520 | "size": [ 521 | 282.8617858886719, 522 | 164.08004760742188 523 | ], 524 | "flags": {}, 525 | "order": 3, 526 | "mode": 0, 527 | "inputs": [], 528 | "outputs": [], 529 | "properties": { 530 | "text": "" 531 | }, 532 | "widgets_values": [ 533 | "Author: https://www.youtube.com/@tech-practice9805\n\nFor video tutorial, see: https://youtu.be/HhXPnjFE3uc\n\nLoRA downloads link: https://huggingface.co/XLabs-AI/flux-lora-collection" 534 | ], 535 | "color": "#432", 536 | "bgcolor": "#653" 537 | } 538 | ], 539 | "links": [ 540 | [ 541 | 9, 542 | 8, 543 | 0, 544 | 9, 545 | 0, 546 | "IMAGE" 547 | ], 548 | [ 549 | 46, 550 | 30, 551 | 2, 552 | 8, 553 | 1, 554 | "VAE" 555 | ], 556 | [ 557 | 51, 558 | 27, 559 | 0, 560 | 31, 561 | 3, 562 | "LATENT" 563 | ], 564 | [ 565 | 52, 566 | 31, 567 | 0, 568 | 8, 569 | 0, 570 | "LATENT" 571 | ], 572 | [ 573 | 54, 574 | 30, 575 | 1, 576 | 33, 577 | 0, 578 | "CLIP" 579 | ], 580 | [ 581 | 55, 582 | 33, 583 | 0, 584 | 31, 585 | 2, 586 | "CONDITIONING" 587 | ], 588 | [ 589 | 56, 590 | 6, 591 | 0, 592 | 35, 593 | 0, 594 | "CONDITIONING" 595 | ], 596 | [ 597 | 57, 598 | 35, 599 | 0, 600 | 31, 601 | 1, 602 | "CONDITIONING" 603 | ], 604 | [ 605 | 58, 606 | 30, 607 | 1, 608 | 37, 609 | 1, 610 | "CLIP" 611 | ], 612 | [ 613 | 59, 614 | 37, 615 | 1, 616 | 6, 617 | 0, 618 | "CLIP" 619 | ], 620 | [ 621 | 60, 622 | 30, 623 | 0, 624 | 37, 625 | 0, 626 | "MODEL" 627 | ], 628 | [ 629 | 61, 630 | 37, 631 | 0, 632 | 31, 633 | 0, 634 | "MODEL" 635 | ] 636 | ], 637 | "groups": [], 638 | "config": {}, 639 | 
"extra": { 640 | "ds": { 641 | "scale": 0.8264462809917354, 642 | "offset": [ 643 | 81.46376898485687, 644 | 32.34076275141956 645 | ] 646 | } 647 | }, 648 | "version": 0.4 649 | } -------------------------------------------------------------------------------- /HiDream_dev_28steps.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "01d66ae9-78be-4a8d-b737-24eee5e1d447", 3 | "revision": 0, 4 | "last_node_id": 73, 5 | "last_link_id": 163, 6 | "nodes": [ 7 | { 8 | "id": 40, 9 | "type": "CLIPTextEncode", 10 | "pos": [ 11 | 530, 12 | 260 13 | ], 14 | "size": [ 15 | 432, 16 | 192 17 | ], 18 | "flags": { 19 | "collapsed": true 20 | }, 21 | "order": 8, 22 | "mode": 0, 23 | "inputs": [ 24 | { 25 | "name": "clip", 26 | "type": "CLIP", 27 | "link": 112 28 | } 29 | ], 30 | "outputs": [ 31 | { 32 | "name": "CONDITIONING", 33 | "type": "CONDITIONING", 34 | "slot_index": 0, 35 | "links": [ 36 | 114 37 | ] 38 | } 39 | ], 40 | "title": "Negative Prompt", 41 | "properties": { 42 | "cnr_id": "comfy-core", 43 | "ver": "0.3.29", 44 | "Node name for S&R": "CLIPTextEncode" 45 | }, 46 | "widgets_values": [ 47 | "bad ugly jpeg artifacts" 48 | ], 49 | "color": "#322", 50 | "bgcolor": "#533" 51 | }, 52 | { 53 | "id": 70, 54 | "type": "ModelSamplingSD3", 55 | "pos": [ 56 | 750, 57 | -90 58 | ], 59 | "size": [ 60 | 210, 61 | 58 62 | ], 63 | "flags": {}, 64 | "order": 6, 65 | "mode": 0, 66 | "inputs": [ 67 | { 68 | "name": "model", 69 | "type": "MODEL", 70 | "link": 162 71 | } 72 | ], 73 | "outputs": [ 74 | { 75 | "name": "MODEL", 76 | "type": "MODEL", 77 | "links": [ 78 | 163 79 | ] 80 | } 81 | ], 82 | "properties": { 83 | "cnr_id": "comfy-core", 84 | "ver": "0.3.29", 85 | "Node name for S&R": "ModelSamplingSD3" 86 | }, 87 | "widgets_values": [ 88 | 6.000000000000001 89 | ] 90 | }, 91 | { 92 | "id": 8, 93 | "type": "VAEDecode", 94 | "pos": [ 95 | 1340, 96 | -20 97 | ], 98 | "size": [ 99 | 210, 100 | 46 101 | ], 102 | "flags": {}, 103 | "order": 10, 104 | "mode": 0, 105 | "inputs": [ 106 | { 107 | "name": "samples", 108 | "type": "LATENT", 109 | "link": 160 110 | }, 111 | { 112 | "name": "vae", 113 | "type": "VAE", 114 | "link": 107 115 | } 116 | ], 117 | "outputs": [ 118 | { 119 | "name": "IMAGE", 120 | "type": "IMAGE", 121 | "slot_index": 0, 122 | "links": [ 123 | 51 124 | ] 125 | } 126 | ], 127 | "properties": { 128 | "cnr_id": "comfy-core", 129 | "ver": "0.3.29", 130 | "Node name for S&R": "VAEDecode" 131 | }, 132 | "widgets_values": [] 133 | }, 134 | { 135 | "id": 55, 136 | "type": "VAELoader", 137 | "pos": [ 138 | 1010, 139 | 290 140 | ], 141 | "size": [ 142 | 310, 143 | 60 144 | ], 145 | "flags": {}, 146 | "order": 0, 147 | "mode": 0, 148 | "inputs": [], 149 | "outputs": [ 150 | { 151 | "name": "VAE", 152 | "type": "VAE", 153 | "links": [ 154 | 107 155 | ] 156 | } 157 | ], 158 | "properties": { 159 | "cnr_id": "comfy-core", 160 | "ver": "0.3.29", 161 | "Node name for S&R": "VAELoader" 162 | }, 163 | "widgets_values": [ 164 | "ae.safetensors" 165 | ] 166 | }, 167 | { 168 | "id": 53, 169 | "type": "EmptySD3LatentImage", 170 | "pos": [ 171 | 620, 172 | 340 173 | ], 174 | "size": [ 175 | 315, 176 | 106 177 | ], 178 | "flags": {}, 179 | "order": 1, 180 | "mode": 0, 181 | "inputs": [], 182 | "outputs": [ 183 | { 184 | "name": "LATENT", 185 | "type": "LATENT", 186 | "slot_index": 0, 187 | "links": [ 188 | 100 189 | ] 190 | } 191 | ], 192 | "properties": { 193 | "cnr_id": "comfy-core", 194 | "ver": "0.3.29", 195 | "Node name for S&R": "EmptySD3LatentImage" 196 | }, 
197 | "widgets_values": [ 198 | 1024, 199 | 1024, 200 | 1 201 | ] 202 | }, 203 | { 204 | "id": 72, 205 | "type": "MarkdownNote", 206 | "pos": [ 207 | 1010, 208 | 430 209 | ], 210 | "size": [ 211 | 300, 212 | 450 213 | ], 214 | "flags": {}, 215 | "order": 2, 216 | "mode": 0, 217 | "inputs": [], 218 | "outputs": [], 219 | "title": "Sampling Settings", 220 | "properties": {}, 221 | "widgets_values": [ 222 | "## Official sampling settings\n\nProvided for reference, my workflows may have slightly different settings.\n\n### HiDream Full\n\n* hidream_i1_full_fp16.safetensors\n* shift: 3.0\n* steps: 50\n* sampler: uni_pc\n* scheduler: simple\n* cfg: 5.0\n\n### HiDream Dev\n\n* hidream_i1_dev_bf16.safetensors\n* shift: 6.0\n* steps: 28\n* sampler: lcm\n* scheduler: normal\n* cfg: 1.0 (no negative prompt)\n\n### HiDream Fast\n\n* hidream_i1_fast_bf16.safetensors\n* shift: 3.0\n* steps: 16\n* sampler: lcm\n* scheduler: normal\n* cfg: 1.0 (no negative prompt)\n" 223 | ], 224 | "color": "#432", 225 | "bgcolor": "#653" 226 | }, 227 | { 228 | "id": 73, 229 | "type": "Note", 230 | "pos": [ 231 | 230, 232 | -220 233 | ], 234 | "size": [ 235 | 250, 236 | 88 237 | ], 238 | "flags": {}, 239 | "order": 3, 240 | "mode": 0, 241 | "inputs": [], 242 | "outputs": [], 243 | "properties": {}, 244 | "widgets_values": [ 245 | "You can try changing the weight_dtype to fp8 if you are running out of memory." 246 | ], 247 | "color": "#432", 248 | "bgcolor": "#653" 249 | }, 250 | { 251 | "id": 69, 252 | "type": "UNETLoader", 253 | "pos": [ 254 | 80, 255 | -90 256 | ], 257 | "size": [ 258 | 400, 259 | 82 260 | ], 261 | "flags": {}, 262 | "order": 4, 263 | "mode": 0, 264 | "inputs": [], 265 | "outputs": [ 266 | { 267 | "name": "MODEL", 268 | "type": "MODEL", 269 | "links": [ 270 | 162 271 | ] 272 | } 273 | ], 274 | "properties": { 275 | "cnr_id": "comfy-core", 276 | "ver": "0.3.29", 277 | "Node name for S&R": "UNETLoader" 278 | }, 279 | "widgets_values": [ 280 | "hidream_i1_dev_fp8.safetensors", 281 | "default" 282 | ], 283 | "color": "#223", 284 | "bgcolor": "#335" 285 | }, 286 | { 287 | "id": 9, 288 | "type": "SaveImage", 289 | "pos": [ 290 | 1494.6988525390625, 291 | -63.880558013916016 292 | ], 293 | "size": [ 294 | 966.615966796875, 295 | 1025.3206787109375 296 | ], 297 | "flags": {}, 298 | "order": 11, 299 | "mode": 0, 300 | "inputs": [ 301 | { 302 | "name": "images", 303 | "type": "IMAGE", 304 | "link": 51 305 | } 306 | ], 307 | "outputs": [], 308 | "properties": { 309 | "cnr_id": "comfy-core", 310 | "ver": "0.3.29", 311 | "Node name for S&R": "SaveImage" 312 | }, 313 | "widgets_values": [ 314 | "ComfyUI" 315 | ] 316 | }, 317 | { 318 | "id": 54, 319 | "type": "QuadrupleCLIPLoader", 320 | "pos": [ 321 | 80, 322 | 50 323 | ], 324 | "size": [ 325 | 400, 326 | 130 327 | ], 328 | "flags": {}, 329 | "order": 5, 330 | "mode": 0, 331 | "inputs": [], 332 | "outputs": [ 333 | { 334 | "name": "CLIP", 335 | "type": "CLIP", 336 | "slot_index": 0, 337 | "links": [ 338 | 111, 339 | 112 340 | ] 341 | } 342 | ], 343 | "properties": { 344 | "cnr_id": "comfy-core", 345 | "ver": "0.3.29", 346 | "Node name for S&R": "QuadrupleCLIPLoader" 347 | }, 348 | "widgets_values": [ 349 | "clip_l_hidream.safetensors", 350 | "clip_g_hidream.safetensors", 351 | "t5xxl_fp8_e4m3fn_scaled.safetensors", 352 | "llama_3.1_8b_instruct_fp8_scaled.safetensors" 353 | ], 354 | "color": "#223", 355 | "bgcolor": "#335" 356 | }, 357 | { 358 | "id": 16, 359 | "type": "CLIPTextEncode", 360 | "pos": [ 361 | 530, 362 | 20 363 | ], 364 | "size": [ 365 | 432, 366 | 192 
367 | ], 368 | "flags": {}, 369 | "order": 7, 370 | "mode": 0, 371 | "inputs": [ 372 | { 373 | "name": "clip", 374 | "type": "CLIP", 375 | "link": 111 376 | } 377 | ], 378 | "outputs": [ 379 | { 380 | "name": "CONDITIONING", 381 | "type": "CONDITIONING", 382 | "slot_index": 0, 383 | "links": [ 384 | 21 385 | ] 386 | } 387 | ], 388 | "title": "Positive Prompt", 389 | "properties": { 390 | "cnr_id": "comfy-core", 391 | "ver": "0.3.29", 392 | "Node name for S&R": "CLIPTextEncode" 393 | }, 394 | "widgets_values": [ 395 | "one young red head women hold a cute sign saying \"Tech Practice\" with one heart emoj on the sign" 396 | ], 397 | "color": "#232", 398 | "bgcolor": "#353" 399 | }, 400 | { 401 | "id": 3, 402 | "type": "KSampler", 403 | "pos": [ 404 | 1010, 405 | -20 406 | ], 407 | "size": [ 408 | 310, 409 | 262 410 | ], 411 | "flags": {}, 412 | "order": 9, 413 | "mode": 0, 414 | "inputs": [ 415 | { 416 | "name": "model", 417 | "type": "MODEL", 418 | "link": 163 419 | }, 420 | { 421 | "name": "positive", 422 | "type": "CONDITIONING", 423 | "link": 21 424 | }, 425 | { 426 | "name": "negative", 427 | "type": "CONDITIONING", 428 | "link": 114 429 | }, 430 | { 431 | "name": "latent_image", 432 | "type": "LATENT", 433 | "link": 100 434 | } 435 | ], 436 | "outputs": [ 437 | { 438 | "name": "LATENT", 439 | "type": "LATENT", 440 | "slot_index": 0, 441 | "links": [ 442 | 160 443 | ] 444 | } 445 | ], 446 | "properties": { 447 | "cnr_id": "comfy-core", 448 | "ver": "0.3.29", 449 | "Node name for S&R": "KSampler" 450 | }, 451 | "widgets_values": [ 452 | 147638433643750, 453 | "increment", 454 | 28, 455 | 1, 456 | "lcm", 457 | "normal", 458 | 1 459 | ] 460 | } 461 | ], 462 | "links": [ 463 | [ 464 | 21, 465 | 16, 466 | 0, 467 | 3, 468 | 1, 469 | "CONDITIONING" 470 | ], 471 | [ 472 | 51, 473 | 8, 474 | 0, 475 | 9, 476 | 0, 477 | "IMAGE" 478 | ], 479 | [ 480 | 100, 481 | 53, 482 | 0, 483 | 3, 484 | 3, 485 | "LATENT" 486 | ], 487 | [ 488 | 107, 489 | 55, 490 | 0, 491 | 8, 492 | 1, 493 | "VAE" 494 | ], 495 | [ 496 | 111, 497 | 54, 498 | 0, 499 | 16, 500 | 0, 501 | "CLIP" 502 | ], 503 | [ 504 | 112, 505 | 54, 506 | 0, 507 | 40, 508 | 0, 509 | "CLIP" 510 | ], 511 | [ 512 | 114, 513 | 40, 514 | 0, 515 | 3, 516 | 2, 517 | "CONDITIONING" 518 | ], 519 | [ 520 | 160, 521 | 3, 522 | 0, 523 | 8, 524 | 0, 525 | "LATENT" 526 | ], 527 | [ 528 | 162, 529 | 69, 530 | 0, 531 | 70, 532 | 0, 533 | "MODEL" 534 | ], 535 | [ 536 | 163, 537 | 70, 538 | 0, 539 | 3, 540 | 0, 541 | "MODEL" 542 | ] 543 | ], 544 | "groups": [], 545 | "config": {}, 546 | "extra": { 547 | "ds": { 548 | "scale": 0.8390545288824038, 549 | "offset": [ 550 | -30.37114011388864, 551 | 379.2463409104089 552 | ] 553 | }, 554 | "frontendVersion": "1.17.11", 555 | "VHS_latentpreview": false, 556 | "VHS_latentpreviewrate": 0, 557 | "VHS_MetadataImage": true, 558 | "VHS_KeepIntermediate": true 559 | }, 560 | "version": 0.4 561 | } -------------------------------------------------------------------------------- /HiDream_fast_16steps.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "14053473-2dd2-4e42-9cf7-307db25fa05e", 3 | "revision": 0, 4 | "last_node_id": 73, 5 | "last_link_id": 163, 6 | "nodes": [ 7 | { 8 | "id": 8, 9 | "type": "VAEDecode", 10 | "pos": [ 11 | 1340, 12 | -20 13 | ], 14 | "size": [ 15 | 210, 16 | 46 17 | ], 18 | "flags": {}, 19 | "order": 10, 20 | "mode": 0, 21 | "inputs": [ 22 | { 23 | "name": "samples", 24 | "type": "LATENT", 25 | "link": 160 26 | }, 27 | { 28 | "name": "vae", 29 | "type": 
"VAE", 30 | "link": 107 31 | } 32 | ], 33 | "outputs": [ 34 | { 35 | "name": "IMAGE", 36 | "type": "IMAGE", 37 | "slot_index": 0, 38 | "links": [ 39 | 51 40 | ] 41 | } 42 | ], 43 | "properties": { 44 | "cnr_id": "comfy-core", 45 | "ver": "0.3.29", 46 | "Node name for S&R": "VAEDecode" 47 | }, 48 | "widgets_values": [] 49 | }, 50 | { 51 | "id": 55, 52 | "type": "VAELoader", 53 | "pos": [ 54 | 1010, 55 | 290 56 | ], 57 | "size": [ 58 | 310, 59 | 60 60 | ], 61 | "flags": {}, 62 | "order": 0, 63 | "mode": 0, 64 | "inputs": [], 65 | "outputs": [ 66 | { 67 | "name": "VAE", 68 | "type": "VAE", 69 | "links": [ 70 | 107 71 | ] 72 | } 73 | ], 74 | "properties": { 75 | "cnr_id": "comfy-core", 76 | "ver": "0.3.29", 77 | "Node name for S&R": "VAELoader" 78 | }, 79 | "widgets_values": [ 80 | "ae.safetensors" 81 | ] 82 | }, 83 | { 84 | "id": 54, 85 | "type": "QuadrupleCLIPLoader", 86 | "pos": [ 87 | 80, 88 | 50 89 | ], 90 | "size": [ 91 | 400, 92 | 130 93 | ], 94 | "flags": {}, 95 | "order": 1, 96 | "mode": 0, 97 | "inputs": [], 98 | "outputs": [ 99 | { 100 | "name": "CLIP", 101 | "type": "CLIP", 102 | "slot_index": 0, 103 | "links": [ 104 | 111, 105 | 112 106 | ] 107 | } 108 | ], 109 | "properties": { 110 | "cnr_id": "comfy-core", 111 | "ver": "0.3.29", 112 | "Node name for S&R": "QuadrupleCLIPLoader" 113 | }, 114 | "widgets_values": [ 115 | "clip_l_hidream.safetensors", 116 | "clip_g_hidream.safetensors", 117 | "t5xxl_fp8_e4m3fn_scaled.safetensors", 118 | "llama_3.1_8b_instruct_fp8_scaled.safetensors" 119 | ], 120 | "color": "#223", 121 | "bgcolor": "#335" 122 | }, 123 | { 124 | "id": 9, 125 | "type": "SaveImage", 126 | "pos": [ 127 | 1570, 128 | -20 129 | ], 130 | "size": [ 131 | 966.615966796875, 132 | 1025.3206787109375 133 | ], 134 | "flags": {}, 135 | "order": 11, 136 | "mode": 0, 137 | "inputs": [ 138 | { 139 | "name": "images", 140 | "type": "IMAGE", 141 | "link": 51 142 | } 143 | ], 144 | "outputs": [], 145 | "properties": { 146 | "cnr_id": "comfy-core", 147 | "ver": "0.3.29", 148 | "Node name for S&R": "SaveImage" 149 | }, 150 | "widgets_values": [ 151 | "ComfyUI" 152 | ] 153 | }, 154 | { 155 | "id": 72, 156 | "type": "MarkdownNote", 157 | "pos": [ 158 | 1010, 159 | 430 160 | ], 161 | "size": [ 162 | 300, 163 | 450 164 | ], 165 | "flags": {}, 166 | "order": 2, 167 | "mode": 0, 168 | "inputs": [], 169 | "outputs": [], 170 | "title": "Sampling Settings", 171 | "properties": {}, 172 | "widgets_values": [ 173 | "## Official sampling settings\n\nProvided for reference, my workflows may have slightly different settings.\n\n### HiDream Full\n\n* hidream_i1_full_fp16.safetensors\n* shift: 3.0\n* steps: 50\n* sampler: uni_pc\n* scheduler: simple\n* cfg: 5.0\n\n### HiDream Dev\n\n* hidream_i1_dev_bf16.safetensors\n* shift: 6.0\n* steps: 28\n* sampler: lcm\n* scheduler: normal\n* cfg: 1.0 (no negative prompt)\n\n### HiDream Fast\n\n* hidream_i1_fast_bf16.safetensors\n* shift: 3.0\n* steps: 16\n* sampler: lcm\n* scheduler: normal\n* cfg: 1.0 (no negative prompt)\n" 174 | ], 175 | "color": "#432", 176 | "bgcolor": "#653" 177 | }, 178 | { 179 | "id": 70, 180 | "type": "ModelSamplingSD3", 181 | "pos": [ 182 | 750, 183 | -90 184 | ], 185 | "size": [ 186 | 210, 187 | 58 188 | ], 189 | "flags": {}, 190 | "order": 8, 191 | "mode": 0, 192 | "inputs": [ 193 | { 194 | "name": "model", 195 | "type": "MODEL", 196 | "link": 162 197 | } 198 | ], 199 | "outputs": [ 200 | { 201 | "name": "MODEL", 202 | "type": "MODEL", 203 | "links": [ 204 | 163 205 | ] 206 | } 207 | ], 208 | "properties": { 209 | "cnr_id": "comfy-core", 
210 | "ver": "0.3.29", 211 | "Node name for S&R": "ModelSamplingSD3" 212 | }, 213 | "widgets_values": [ 214 | 3.0000000000000004 215 | ] 216 | }, 217 | { 218 | "id": 53, 219 | "type": "EmptySD3LatentImage", 220 | "pos": [ 221 | 640, 222 | 500 223 | ], 224 | "size": [ 225 | 315, 226 | 106 227 | ], 228 | "flags": {}, 229 | "order": 3, 230 | "mode": 0, 231 | "inputs": [], 232 | "outputs": [ 233 | { 234 | "name": "LATENT", 235 | "type": "LATENT", 236 | "slot_index": 0, 237 | "links": [ 238 | 100 239 | ] 240 | } 241 | ], 242 | "properties": { 243 | "cnr_id": "comfy-core", 244 | "ver": "0.3.29", 245 | "Node name for S&R": "EmptySD3LatentImage" 246 | }, 247 | "widgets_values": [ 248 | 1024, 249 | 1024, 250 | 1 251 | ] 252 | }, 253 | { 254 | "id": 73, 255 | "type": "Note", 256 | "pos": [ 257 | 221.36090087890625, 258 | -222.29476928710938 259 | ], 260 | "size": [ 261 | 250, 262 | 88 263 | ], 264 | "flags": {}, 265 | "order": 4, 266 | "mode": 0, 267 | "inputs": [], 268 | "outputs": [], 269 | "properties": {}, 270 | "widgets_values": [ 271 | "You can try changing the weight_dtype to fp8 if you are running out of memory." 272 | ], 273 | "color": "#432", 274 | "bgcolor": "#653" 275 | }, 276 | { 277 | "id": 40, 278 | "type": "CLIPTextEncode", 279 | "pos": [ 280 | 530, 281 | 260 282 | ], 283 | "size": [ 284 | 432, 285 | 192 286 | ], 287 | "flags": { 288 | "collapsed": false 289 | }, 290 | "order": 7, 291 | "mode": 0, 292 | "inputs": [ 293 | { 294 | "name": "clip", 295 | "type": "CLIP", 296 | "link": 112 297 | } 298 | ], 299 | "outputs": [ 300 | { 301 | "name": "CONDITIONING", 302 | "type": "CONDITIONING", 303 | "slot_index": 0, 304 | "links": [ 305 | 114 306 | ] 307 | } 308 | ], 309 | "title": "Negative Prompt", 310 | "properties": { 311 | "cnr_id": "comfy-core", 312 | "ver": "0.3.29", 313 | "Node name for S&R": "CLIPTextEncode" 314 | }, 315 | "widgets_values": [ 316 | "blurry" 317 | ], 318 | "color": "#322", 319 | "bgcolor": "#533" 320 | }, 321 | { 322 | "id": 69, 323 | "type": "UNETLoader", 324 | "pos": [ 325 | 80, 326 | -90 327 | ], 328 | "size": [ 329 | 400, 330 | 82 331 | ], 332 | "flags": {}, 333 | "order": 5, 334 | "mode": 0, 335 | "inputs": [], 336 | "outputs": [ 337 | { 338 | "name": "MODEL", 339 | "type": "MODEL", 340 | "links": [ 341 | 162 342 | ] 343 | } 344 | ], 345 | "properties": { 346 | "cnr_id": "comfy-core", 347 | "ver": "0.3.29", 348 | "Node name for S&R": "UNETLoader" 349 | }, 350 | "widgets_values": [ 351 | "hidream_i1_fast_fp8.safetensors", 352 | "default" 353 | ], 354 | "color": "#223", 355 | "bgcolor": "#335" 356 | }, 357 | { 358 | "id": 3, 359 | "type": "KSampler", 360 | "pos": [ 361 | 1010, 362 | -20 363 | ], 364 | "size": [ 365 | 310, 366 | 474 367 | ], 368 | "flags": {}, 369 | "order": 9, 370 | "mode": 0, 371 | "inputs": [ 372 | { 373 | "name": "model", 374 | "type": "MODEL", 375 | "link": 163 376 | }, 377 | { 378 | "name": "positive", 379 | "type": "CONDITIONING", 380 | "link": 21 381 | }, 382 | { 383 | "name": "negative", 384 | "type": "CONDITIONING", 385 | "link": 114 386 | }, 387 | { 388 | "name": "latent_image", 389 | "type": "LATENT", 390 | "link": 100 391 | } 392 | ], 393 | "outputs": [ 394 | { 395 | "name": "LATENT", 396 | "type": "LATENT", 397 | "slot_index": 0, 398 | "links": [ 399 | 160 400 | ] 401 | } 402 | ], 403 | "properties": { 404 | "cnr_id": "comfy-core", 405 | "ver": "0.3.29", 406 | "Node name for S&R": "KSampler" 407 | }, 408 | "widgets_values": [ 409 | 221267224284114, 410 | "increment", 411 | 16, 412 | 1, 413 | "lcm", 414 | "normal", 415 | 1 416 | ] 417 
| }, 418 | { 419 | "id": 16, 420 | "type": "CLIPTextEncode", 421 | "pos": [ 422 | 530, 423 | 20 424 | ], 425 | "size": [ 426 | 432, 427 | 192 428 | ], 429 | "flags": {}, 430 | "order": 6, 431 | "mode": 0, 432 | "inputs": [ 433 | { 434 | "name": "clip", 435 | "type": "CLIP", 436 | "link": 111 437 | } 438 | ], 439 | "outputs": [ 440 | { 441 | "name": "CONDITIONING", 442 | "type": "CONDITIONING", 443 | "slot_index": 0, 444 | "links": [ 445 | 21 446 | ] 447 | } 448 | ], 449 | "title": "Positive Prompt", 450 | "properties": { 451 | "cnr_id": "comfy-core", 452 | "ver": "0.3.29", 453 | "Node name for S&R": "CLIPTextEncode" 454 | }, 455 | "widgets_values": [ 456 | "one young red head women hold a cute sign saying \"Tech Practice\" with one heart emoj on the sign" 457 | ], 458 | "color": "#232", 459 | "bgcolor": "#353" 460 | } 461 | ], 462 | "links": [ 463 | [ 464 | 21, 465 | 16, 466 | 0, 467 | 3, 468 | 1, 469 | "CONDITIONING" 470 | ], 471 | [ 472 | 51, 473 | 8, 474 | 0, 475 | 9, 476 | 0, 477 | "IMAGE" 478 | ], 479 | [ 480 | 100, 481 | 53, 482 | 0, 483 | 3, 484 | 3, 485 | "LATENT" 486 | ], 487 | [ 488 | 107, 489 | 55, 490 | 0, 491 | 8, 492 | 1, 493 | "VAE" 494 | ], 495 | [ 496 | 111, 497 | 54, 498 | 0, 499 | 16, 500 | 0, 501 | "CLIP" 502 | ], 503 | [ 504 | 112, 505 | 54, 506 | 0, 507 | 40, 508 | 0, 509 | "CLIP" 510 | ], 511 | [ 512 | 114, 513 | 40, 514 | 0, 515 | 3, 516 | 2, 517 | "CONDITIONING" 518 | ], 519 | [ 520 | 160, 521 | 3, 522 | 0, 523 | 8, 524 | 0, 525 | "LATENT" 526 | ], 527 | [ 528 | 162, 529 | 69, 530 | 0, 531 | 70, 532 | 0, 533 | "MODEL" 534 | ], 535 | [ 536 | 163, 537 | 70, 538 | 0, 539 | 3, 540 | 0, 541 | "MODEL" 542 | ] 543 | ], 544 | "groups": [], 545 | "config": {}, 546 | "extra": { 547 | "ds": { 548 | "scale": 0.8390545288824088, 549 | "offset": [ 550 | 510.69875438523576, 551 | 415.8310868264834 552 | ] 553 | }, 554 | "frontendVersion": "1.17.11", 555 | "VHS_latentpreview": false, 556 | "VHS_latentpreviewrate": 0, 557 | "VHS_MetadataImage": true, 558 | "VHS_KeepIntermediate": true 559 | }, 560 | "version": 0.4 561 | } -------------------------------------------------------------------------------- /Mochi_text_2_video.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 41, 3 | "last_link_id": 79, 4 | "nodes": [ 5 | { 6 | "id": 7, 7 | "type": "CLIPTextEncode", 8 | "pos": { 9 | "0": 413, 10 | "1": 389 11 | }, 12 | "size": { 13 | "0": 425.27801513671875, 14 | "1": 180.6060791015625 15 | }, 16 | "flags": {}, 17 | "order": 7, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "clip", 22 | "type": "CLIP", 23 | "link": 75 24 | } 25 | ], 26 | "outputs": [ 27 | { 28 | "name": "CONDITIONING", 29 | "type": "CONDITIONING", 30 | "links": [ 31 | 52 32 | ], 33 | "slot_index": 0 34 | } 35 | ], 36 | "properties": { 37 | "Node name for S&R": "CLIPTextEncode" 38 | }, 39 | "widgets_values": [ 40 | "", 41 | true 42 | ] 43 | }, 44 | { 45 | "id": 8, 46 | "type": "VAEDecode", 47 | "pos": { 48 | "0": 1210, 49 | "1": 190 50 | }, 51 | "size": { 52 | "0": 210, 53 | "1": 46 54 | }, 55 | "flags": {}, 56 | "order": 9, 57 | "mode": 0, 58 | "inputs": [ 59 | { 60 | "name": "samples", 61 | "type": "LATENT", 62 | "link": 35 63 | }, 64 | { 65 | "name": "vae", 66 | "type": "VAE", 67 | "link": 76 68 | } 69 | ], 70 | "outputs": [ 71 | { 72 | "name": "IMAGE", 73 | "type": "IMAGE", 74 | "links": [ 75 | 56 76 | ], 77 | "slot_index": 0 78 | } 79 | ], 80 | "properties": { 81 | "Node name for S&R": "VAEDecode" 82 | }, 83 | "widgets_values": [] 84 | }, 85 | { 86 
| "id": 3, 87 | "type": "KSampler", 88 | "pos": { 89 | "0": 863, 90 | "1": 187 91 | }, 92 | "size": { 93 | "0": 315, 94 | "1": 262 95 | }, 96 | "flags": {}, 97 | "order": 8, 98 | "mode": 0, 99 | "inputs": [ 100 | { 101 | "name": "model", 102 | "type": "MODEL", 103 | "link": 79 104 | }, 105 | { 106 | "name": "positive", 107 | "type": "CONDITIONING", 108 | "link": 46 109 | }, 110 | { 111 | "name": "negative", 112 | "type": "CONDITIONING", 113 | "link": 52 114 | }, 115 | { 116 | "name": "latent_image", 117 | "type": "LATENT", 118 | "link": 38 119 | } 120 | ], 121 | "outputs": [ 122 | { 123 | "name": "LATENT", 124 | "type": "LATENT", 125 | "links": [ 126 | 35 127 | ], 128 | "slot_index": 0 129 | } 130 | ], 131 | "properties": { 132 | "Node name for S&R": "KSampler" 133 | }, 134 | "widgets_values": [ 135 | 781839250933514, 136 | "randomize", 137 | 30, 138 | 4.5, 139 | "euler", 140 | "simple", 141 | 1 142 | ] 143 | }, 144 | { 145 | "id": 39, 146 | "type": "VAELoader", 147 | "pos": { 148 | "0": 890, 149 | "1": 500 150 | }, 151 | "size": { 152 | "0": 278.68310546875, 153 | "1": 58 154 | }, 155 | "flags": {}, 156 | "order": 0, 157 | "mode": 0, 158 | "inputs": [], 159 | "outputs": [ 160 | { 161 | "name": "VAE", 162 | "type": "VAE", 163 | "links": [ 164 | 76 165 | ] 166 | } 167 | ], 168 | "properties": { 169 | "Node name for S&R": "VAELoader" 170 | }, 171 | "widgets_values": [ 172 | "mochi_vae.safetensors" 173 | ] 174 | }, 175 | { 176 | "id": 28, 177 | "type": "SaveAnimatedWEBP", 178 | "pos": { 179 | "0": 1432, 180 | "1": 111 181 | }, 182 | "size": { 183 | "0": 847.3048706054688, 184 | "1": 602.0325317382812 185 | }, 186 | "flags": {}, 187 | "order": 10, 188 | "mode": 0, 189 | "inputs": [ 190 | { 191 | "name": "images", 192 | "type": "IMAGE", 193 | "link": 56 194 | } 195 | ], 196 | "outputs": [], 197 | "properties": { 198 | "Node name for S&R": "SaveAnimatedWEBP" 199 | }, 200 | "widgets_values": [ 201 | "ComfyUI", 202 | 24, 203 | false, 204 | 80, 205 | "default" 206 | ] 207 | }, 208 | { 209 | "id": 6, 210 | "type": "CLIPTextEncode", 211 | "pos": { 212 | "0": 415, 213 | "1": 186 214 | }, 215 | "size": { 216 | "0": 422.84503173828125, 217 | "1": 164.31304931640625 218 | }, 219 | "flags": {}, 220 | "order": 6, 221 | "mode": 0, 222 | "inputs": [ 223 | { 224 | "name": "clip", 225 | "type": "CLIP", 226 | "link": 74 227 | } 228 | ], 229 | "outputs": [ 230 | { 231 | "name": "CONDITIONING", 232 | "type": "CONDITIONING", 233 | "links": [ 234 | 46 235 | ], 236 | "slot_index": 0 237 | } 238 | ], 239 | "properties": { 240 | "Node name for S&R": "CLIPTextEncode" 241 | }, 242 | "widgets_values": [ 243 | "A movie trailer featuring the adventures of the 30 year old space man wearing a red wool knitted motorcycle helmet, blue sky, salt desert, cinematic style, shot on 35mm film, vivid colors.", 244 | true 245 | ] 246 | }, 247 | { 248 | "id": 21, 249 | "type": "EmptyMochiLatentVideo", 250 | "pos": { 251 | "0": 520, 252 | "1": 620 253 | }, 254 | "size": { 255 | "0": 315, 256 | "1": 130 257 | }, 258 | "flags": {}, 259 | "order": 1, 260 | "mode": 0, 261 | "inputs": [], 262 | "outputs": [ 263 | { 264 | "name": "LATENT", 265 | "type": "LATENT", 266 | "links": [ 267 | 38 268 | ], 269 | "slot_index": 0 270 | } 271 | ], 272 | "properties": { 273 | "Node name for S&R": "EmptyMochiLatentVideo" 274 | }, 275 | "widgets_values": [ 276 | 848, 277 | 480, 278 | 61, 279 | 1 280 | ] 281 | }, 282 | { 283 | "id": 37, 284 | "type": "UNETLoader", 285 | "pos": { 286 | "0": 420, 287 | "1": 40 288 | }, 289 | "size": { 290 | "0": 315, 291 | 
"1": 82 292 | }, 293 | "flags": {}, 294 | "order": 2, 295 | "mode": 0, 296 | "inputs": [], 297 | "outputs": [ 298 | { 299 | "name": "MODEL", 300 | "type": "MODEL", 301 | "links": [ 302 | 79 303 | ], 304 | "slot_index": 0 305 | } 306 | ], 307 | "properties": { 308 | "Node name for S&R": "UNETLoader" 309 | }, 310 | "widgets_values": [ 311 | "mochi_preview_bf16.safetensors", 312 | "default" 313 | ] 314 | }, 315 | { 316 | "id": 38, 317 | "type": "CLIPLoader", 318 | "pos": { 319 | "0": 40, 320 | "1": 270 321 | }, 322 | "size": { 323 | "0": 315, 324 | "1": 82 325 | }, 326 | "flags": {}, 327 | "order": 3, 328 | "mode": 0, 329 | "inputs": [], 330 | "outputs": [ 331 | { 332 | "name": "CLIP", 333 | "type": "CLIP", 334 | "links": [ 335 | 74, 336 | 75 337 | ], 338 | "slot_index": 0 339 | } 340 | ], 341 | "properties": { 342 | "Node name for S&R": "CLIPLoader" 343 | }, 344 | "widgets_values": [ 345 | "t5xxl_fp16.safetensors", 346 | "mochi" 347 | ] 348 | }, 349 | { 350 | "id": 40, 351 | "type": "Note", 352 | "pos": { 353 | "0": 923, 354 | "1": 638 355 | }, 356 | "size": [ 357 | 479.68055357205685, 358 | 280.99786553591184 359 | ], 360 | "flags": {}, 361 | "order": 4, 362 | "mode": 0, 363 | "inputs": [], 364 | "outputs": [], 365 | "properties": {}, 366 | "widgets_values": [ 367 | "number of frames: \n\nuse fp8 or fp16. \n\n73 cause the issue: OOM. \n\n60 is ok. \n\n48 is ok. " 368 | ], 369 | "color": "#432", 370 | "bgcolor": "#653" 371 | }, 372 | { 373 | "id": 41, 374 | "type": "Note", 375 | "pos": { 376 | "0": 49, 377 | "1": 42 378 | }, 379 | "size": [ 380 | 300.4007201997441, 381 | 152.27956788140827 382 | ], 383 | "flags": {}, 384 | "order": 5, 385 | "mode": 0, 386 | "inputs": [], 387 | "outputs": [], 388 | "properties": {}, 389 | "widgets_values": [ 390 | "\nhttps://www.youtube.com/@tech-practice9805 \nSee youtube channel for tutorial videos. " 391 | ], 392 | "color": "#432", 393 | "bgcolor": "#653" 394 | } 395 | ], 396 | "links": [ 397 | [ 398 | 35, 399 | 3, 400 | 0, 401 | 8, 402 | 0, 403 | "LATENT" 404 | ], 405 | [ 406 | 38, 407 | 21, 408 | 0, 409 | 3, 410 | 3, 411 | "LATENT" 412 | ], 413 | [ 414 | 46, 415 | 6, 416 | 0, 417 | 3, 418 | 1, 419 | "CONDITIONING" 420 | ], 421 | [ 422 | 52, 423 | 7, 424 | 0, 425 | 3, 426 | 2, 427 | "CONDITIONING" 428 | ], 429 | [ 430 | 56, 431 | 8, 432 | 0, 433 | 28, 434 | 0, 435 | "IMAGE" 436 | ], 437 | [ 438 | 74, 439 | 38, 440 | 0, 441 | 6, 442 | 0, 443 | "CLIP" 444 | ], 445 | [ 446 | 75, 447 | 38, 448 | 0, 449 | 7, 450 | 0, 451 | "CLIP" 452 | ], 453 | [ 454 | 76, 455 | 39, 456 | 0, 457 | 8, 458 | 1, 459 | "VAE" 460 | ], 461 | [ 462 | 79, 463 | 37, 464 | 0, 465 | 3, 466 | 0, 467 | "MODEL" 468 | ] 469 | ], 470 | "groups": [], 471 | "config": {}, 472 | "extra": { 473 | "ds": { 474 | "scale": 0.7247295000000005, 475 | "offset": [ 476 | 36.47709076282855, 477 | 252.47369730236002 478 | ] 479 | }, 480 | "workspace_info": { 481 | "id": "CZboPtTUn6eZSQRz3DL9n" 482 | } 483 | }, 484 | "version": 0.4 485 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ComfyUI_workflows_collection 2 | 3 | This repo is a collection of ComfyUI workflows. Most of them have been covered in video tutorials (see below). 
4 | 5 | To run them online, try them for free at [this site](https://agireact.com) 6 | 7 | # Latest: 8 | 9 | * [Wan2.1 MacBook MacMini text to video](https://youtu.be/SAX-Pue7kSw) 10 | * [TeaCache-ComfyUI speedup](https://youtu.be/q6mZLBiKZ2M) 11 | * [Wan 2.1 text or image to videos](https://youtu.be/tBKd2vB4n2o) 12 | * [RTX 5090 Run ComfyUI](https://youtu.be/WgWeDxJUoD0) 13 | * [LoRA use and training guide for HunyuanVideo](https://youtu.be/q2g82ePnZVs) 14 | * [Mac/MacBook Run HunyuanVideo](https://youtu.be/W6g_mCARTfM) 15 | * [AMD GPU Run HunyuanVideo](https://youtu.be/I6jzCJIii_o) 16 | * [SD3.5 Large + LoRA](https://youtu.be/OuTEUrf4vvo) 17 | I fine-tuned a LoRA on a MacBook. Download it for free from [Hugging Face](https://huggingface.co/Ttio2/sketch_shouxin) 18 | * [ComfyUI + HunyuanVideo text to video](https://youtu.be/R2VPQtJfTDY) 19 | * [ComfyUI + LTX_Video text to video](https://youtu.be/A0FSyx2E5tI) 20 | * [ComfyUI + Pulid + FLUX face cloning and style transfer](https://youtu.be/R1CKgzkOEYk) 21 | * [ComfyUI + SD3.5](https://youtu.be/8HYVAzYRYV4) 22 | 23 | # For video tutorials: 24 | 25 | * [TeaCache-ComfyUI speedup](https://youtu.be/q6mZLBiKZ2M) 26 | * [Wan 2.1 text or image to videos](https://youtu.be/tBKd2vB4n2o) 27 | * [LoRA use and training guide for HunyuanVideo](https://youtu.be/q2g82ePnZVs) 28 | * [ComfyUI installation on MacBooks](https://youtu.be/ZCswfm0dBYY) 29 | * [ComfyUI + FLUX](https://youtu.be/ZCswfm0dBYY) 30 | * [ComfyUI + FLUX + GGUF (low VRAM)](https://youtu.be/9Sg9tHdTzRs) 31 | * [ComfyUI + FLUX + LoRA](https://youtu.be/HhXPnjFE3uc) 32 | * [ComfyUI + FLUX + ControlNet with or without GGUF](https://youtu.be/NRnU83vBkTs) 33 | 34 | # Usage 35 | 36 | Download the workflow files (.png or .json) and then: 37 | 38 | 0. download the checkpoint model files and install any missing custom nodes. 39 | 1. drag and drop the .json or .png file onto the ComfyUI canvas to load the workflow. 40 | 2. customize the checkpoint model selections to match your local files. 41 | 3. 
run the workflow 42 | 43 | -------------------------------------------------------------------------------- /SD3.5-large-workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 53, 3 | "last_link_id": 104, 4 | "nodes": [ 5 | { 6 | "id": 8, 7 | "type": "VAEDecode", 8 | "pos": { 9 | "0": 1200, 10 | "1": 96 11 | }, 12 | "size": { 13 | "0": 210, 14 | "1": 46 15 | }, 16 | "flags": {}, 17 | "order": 10, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "samples", 22 | "type": "LATENT", 23 | "link": 7 24 | }, 25 | { 26 | "name": "vae", 27 | "type": "VAE", 28 | "link": 53, 29 | "slot_index": 1 30 | } 31 | ], 32 | "outputs": [ 33 | { 34 | "name": "IMAGE", 35 | "type": "IMAGE", 36 | "links": [ 37 | 51 38 | ], 39 | "slot_index": 0 40 | } 41 | ], 42 | "properties": { 43 | "Node name for S&R": "VAEDecode" 44 | }, 45 | "widgets_values": [] 46 | }, 47 | { 48 | "id": 40, 49 | "type": "CLIPTextEncode", 50 | "pos": { 51 | "0": 384, 52 | "1": 336 53 | }, 54 | "size": { 55 | "0": 432, 56 | "1": 192 57 | }, 58 | "flags": {}, 59 | "order": 8, 60 | "mode": 0, 61 | "inputs": [ 62 | { 63 | "name": "clip", 64 | "type": "CLIP", 65 | "link": 104 66 | } 67 | ], 68 | "outputs": [ 69 | { 70 | "name": "CONDITIONING", 71 | "type": "CONDITIONING", 72 | "links": [ 73 | 80 74 | ], 75 | "slot_index": 0, 76 | "shape": 3 77 | } 78 | ], 79 | "title": "Negative Prompt", 80 | "properties": { 81 | "Node name for S&R": "CLIPTextEncode" 82 | }, 83 | "widgets_values": [ 84 | "" 85 | ], 86 | "color": "#322", 87 | "bgcolor": "#533" 88 | }, 89 | { 90 | "id": 53, 91 | "type": "EmptySD3LatentImage", 92 | "pos": { 93 | "0": 480, 94 | "1": 576 95 | }, 96 | "size": { 97 | "0": 315, 98 | "1": 106 99 | }, 100 | "flags": {}, 101 | "order": 0, 102 | "mode": 0, 103 | "inputs": [], 104 | "outputs": [ 105 | { 106 | "name": "LATENT", 107 | "type": "LATENT", 108 | "links": [ 109 | 100 110 | ], 111 | "slot_index": 0, 112 | "shape": 3 113 | } 114 | ], 115 | "properties": { 116 | "Node name for S&R": "EmptySD3LatentImage" 117 | }, 118 | "widgets_values": [ 119 | 1024, 120 | 1024, 121 | 1 122 | ] 123 | }, 124 | { 125 | "id": 43, 126 | "type": "TripleCLIPLoader", 127 | "pos": { 128 | "0": -96, 129 | "1": 288 130 | }, 131 | "size": { 132 | "0": 315, 133 | "1": 106 134 | }, 135 | "flags": {}, 136 | "order": 1, 137 | "mode": 0, 138 | "inputs": [], 139 | "outputs": [ 140 | { 141 | "name": "CLIP", 142 | "type": "CLIP", 143 | "links": [ 144 | 103, 145 | 104 146 | ], 147 | "slot_index": 0, 148 | "shape": 3 149 | } 150 | ], 151 | "properties": { 152 | "Node name for S&R": "TripleCLIPLoader" 153 | }, 154 | "widgets_values": [ 155 | "clip_l.safetensors", 156 | "clip_g.safetensors", 157 | "t5xxl_fp16.safetensors" 158 | ] 159 | }, 160 | { 161 | "id": 51, 162 | "type": "Note", 163 | "pos": { 164 | "0": -96, 165 | "1": 624 166 | }, 167 | "size": { 168 | "0": 384, 169 | "1": 192 170 | }, 171 | "flags": {}, 172 | "order": 2, 173 | "mode": 0, 174 | "inputs": [], 175 | "outputs": [], 176 | "properties": { 177 | "text": "" 178 | }, 179 | "widgets_values": [ 180 | "sd3.5_large.safetensors and sd3.5_medium.safetensors are files that do not contain any CLIP/text encoder weights so you need to load them separately.\n\nThey go in the ComfyUI/models/checkpoints directory." 
181 | ], 182 | "color": "#432", 183 | "bgcolor": "#653" 184 | }, 185 | { 186 | "id": 41, 187 | "type": "CLIPLoader", 188 | "pos": { 189 | "0": -96, 190 | "1": 0 191 | }, 192 | "size": { 193 | "0": 315, 194 | "1": 82 195 | }, 196 | "flags": {}, 197 | "order": 3, 198 | "mode": 0, 199 | "inputs": [], 200 | "outputs": [ 201 | { 202 | "name": "CLIP", 203 | "type": "CLIP", 204 | "links": [], 205 | "slot_index": 0, 206 | "shape": 3 207 | } 208 | ], 209 | "properties": { 210 | "Node name for S&R": "CLIPLoader" 211 | }, 212 | "widgets_values": [ 213 | "t5xxl_fp8_e4m3fn.safetensors", 214 | "sd3" 215 | ] 216 | }, 217 | { 218 | "id": 42, 219 | "type": "DualCLIPLoader", 220 | "pos": { 221 | "0": -96, 222 | "1": 144 223 | }, 224 | "size": { 225 | "0": 315, 226 | "1": 106 227 | }, 228 | "flags": {}, 229 | "order": 4, 230 | "mode": 0, 231 | "inputs": [], 232 | "outputs": [ 233 | { 234 | "name": "CLIP", 235 | "type": "CLIP", 236 | "links": [], 237 | "slot_index": 0, 238 | "shape": 3 239 | } 240 | ], 241 | "properties": { 242 | "Node name for S&R": "DualCLIPLoader" 243 | }, 244 | "widgets_values": [ 245 | "clip_l.safetensors", 246 | "clip_g.safetensors", 247 | "sd3" 248 | ] 249 | }, 250 | { 251 | "id": 4, 252 | "type": "CheckpointLoaderSimple", 253 | "pos": { 254 | "0": -96, 255 | "1": 480 256 | }, 257 | "size": { 258 | "0": 384.75592041015625, 259 | "1": 98 260 | }, 261 | "flags": {}, 262 | "order": 5, 263 | "mode": 0, 264 | "inputs": [], 265 | "outputs": [ 266 | { 267 | "name": "MODEL", 268 | "type": "MODEL", 269 | "links": [ 270 | 99 271 | ], 272 | "slot_index": 0 273 | }, 274 | { 275 | "name": "CLIP", 276 | "type": "CLIP", 277 | "links": [], 278 | "slot_index": 1 279 | }, 280 | { 281 | "name": "VAE", 282 | "type": "VAE", 283 | "links": [ 284 | 53 285 | ], 286 | "slot_index": 2 287 | } 288 | ], 289 | "properties": { 290 | "Node name for S&R": "CheckpointLoaderSimple" 291 | }, 292 | "widgets_values": [ 293 | "sd3.5_large.safetensors" 294 | ] 295 | }, 296 | { 297 | "id": 3, 298 | "type": "KSampler", 299 | "pos": { 300 | "0": 864, 301 | "1": 96 302 | }, 303 | "size": { 304 | "0": 315, 305 | "1": 262 306 | }, 307 | "flags": {}, 308 | "order": 9, 309 | "mode": 0, 310 | "inputs": [ 311 | { 312 | "name": "model", 313 | "type": "MODEL", 314 | "link": 99, 315 | "slot_index": 0 316 | }, 317 | { 318 | "name": "positive", 319 | "type": "CONDITIONING", 320 | "link": 21 321 | }, 322 | { 323 | "name": "negative", 324 | "type": "CONDITIONING", 325 | "link": 80 326 | }, 327 | { 328 | "name": "latent_image", 329 | "type": "LATENT", 330 | "link": 100 331 | } 332 | ], 333 | "outputs": [ 334 | { 335 | "name": "LATENT", 336 | "type": "LATENT", 337 | "links": [ 338 | 7 339 | ], 340 | "slot_index": 0 341 | } 342 | ], 343 | "properties": { 344 | "Node name for S&R": "KSampler" 345 | }, 346 | "widgets_values": [ 347 | 714772221606588, 348 | "increment", 349 | 20, 350 | 5.45, 351 | "euler", 352 | "sgm_uniform", 353 | 1 354 | ] 355 | }, 356 | { 357 | "id": 16, 358 | "type": "CLIPTextEncode", 359 | "pos": { 360 | "0": 384, 361 | "1": 96 362 | }, 363 | "size": { 364 | "0": 432, 365 | "1": 192 366 | }, 367 | "flags": {}, 368 | "order": 7, 369 | "mode": 0, 370 | "inputs": [ 371 | { 372 | "name": "clip", 373 | "type": "CLIP", 374 | "link": 103 375 | } 376 | ], 377 | "outputs": [ 378 | { 379 | "name": "CONDITIONING", 380 | "type": "CONDITIONING", 381 | "links": [ 382 | 21 383 | ], 384 | "slot_index": 0 385 | } 386 | ], 387 | "title": "Positive Prompt", 388 | "properties": { 389 | "Node name for S&R": "CLIPTextEncode" 390 | }, 391 | 
"widgets_values": [ 392 | "a photo of a cute cat holding a sign that says \"Tech Practice\"" 393 | ], 394 | "color": "#232", 395 | "bgcolor": "#353" 396 | }, 397 | { 398 | "id": 50, 399 | "type": "Note", 400 | "pos": { 401 | "0": -384, 402 | "1": 144 403 | }, 404 | "size": { 405 | "0": 223.34756469726562, 406 | "1": 254.37765502929688 407 | }, 408 | "flags": {}, 409 | "order": 6, 410 | "mode": 0, 411 | "inputs": [], 412 | "outputs": [], 413 | "properties": { 414 | "text": "" 415 | }, 416 | "widgets_values": [ 417 | "SD3 supports different text encoder configurations, you can see how to load them here.\n\n\nMake sure to put these files:\nclip_g.safetensors\nclip_l.safetensors\nt5xxl_fp16.safetensors\n\n\nIn the ComfyUI/models/clip directory\n\nYoutube Channle:\nhttps://www.youtube.com/@tech-practice9805\n" 418 | ], 419 | "color": "#432", 420 | "bgcolor": "#653" 421 | }, 422 | { 423 | "id": 9, 424 | "type": "SaveImage", 425 | "pos": { 426 | "0": 876, 427 | "1": 470 428 | }, 429 | "size": { 430 | "0": 952.5112915039062, 431 | "1": 1007.9328002929688 432 | }, 433 | "flags": {}, 434 | "order": 11, 435 | "mode": 0, 436 | "inputs": [ 437 | { 438 | "name": "images", 439 | "type": "IMAGE", 440 | "link": 51, 441 | "slot_index": 0 442 | } 443 | ], 444 | "outputs": [], 445 | "properties": {}, 446 | "widgets_values": [ 447 | "ComfyUI" 448 | ] 449 | } 450 | ], 451 | "links": [ 452 | [ 453 | 7, 454 | 3, 455 | 0, 456 | 8, 457 | 0, 458 | "LATENT" 459 | ], 460 | [ 461 | 21, 462 | 16, 463 | 0, 464 | 3, 465 | 1, 466 | "CONDITIONING" 467 | ], 468 | [ 469 | 51, 470 | 8, 471 | 0, 472 | 9, 473 | 0, 474 | "IMAGE" 475 | ], 476 | [ 477 | 53, 478 | 4, 479 | 2, 480 | 8, 481 | 1, 482 | "VAE" 483 | ], 484 | [ 485 | 80, 486 | 40, 487 | 0, 488 | 3, 489 | 2, 490 | "CONDITIONING" 491 | ], 492 | [ 493 | 99, 494 | 4, 495 | 0, 496 | 3, 497 | 0, 498 | "MODEL" 499 | ], 500 | [ 501 | 100, 502 | 53, 503 | 0, 504 | 3, 505 | 3, 506 | "LATENT" 507 | ], 508 | [ 509 | 103, 510 | 43, 511 | 0, 512 | 16, 513 | 0, 514 | "CLIP" 515 | ], 516 | [ 517 | 104, 518 | 43, 519 | 0, 520 | 40, 521 | 0, 522 | "CLIP" 523 | ] 524 | ], 525 | "groups": [ 526 | { 527 | "title": "Different Text Encoder Configurations", 528 | "bounding": [ 529 | -144, 530 | -96, 531 | 480, 532 | 528 533 | ], 534 | "color": "#3f789e", 535 | "font_size": 24, 536 | "flags": {} 537 | } 538 | ], 539 | "config": {}, 540 | "extra": { 541 | "ds": { 542 | "scale": 0.6209213230591553, 543 | "offset": [ 544 | 1101.3806216057965, 545 | 374.56567129154166 546 | ] 547 | } 548 | }, 549 | "version": 0.4 550 | } -------------------------------------------------------------------------------- /SD3.5-turbo-large-workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 53, 3 | "last_link_id": 104, 4 | "nodes": [ 5 | { 6 | "id": 8, 7 | "type": "VAEDecode", 8 | "pos": { 9 | "0": 1200, 10 | "1": 96 11 | }, 12 | "size": { 13 | "0": 210, 14 | "1": 46 15 | }, 16 | "flags": {}, 17 | "order": 10, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "samples", 22 | "type": "LATENT", 23 | "link": 7 24 | }, 25 | { 26 | "name": "vae", 27 | "type": "VAE", 28 | "link": 53, 29 | "slot_index": 1 30 | } 31 | ], 32 | "outputs": [ 33 | { 34 | "name": "IMAGE", 35 | "type": "IMAGE", 36 | "links": [ 37 | 51 38 | ], 39 | "slot_index": 0 40 | } 41 | ], 42 | "properties": { 43 | "Node name for S&R": "VAEDecode" 44 | }, 45 | "widgets_values": [] 46 | }, 47 | { 48 | "id": 40, 49 | "type": "CLIPTextEncode", 50 | "pos": { 51 | "0": 384, 52 | "1": 336 53 | }, 
54 | "size": { 55 | "0": 432, 56 | "1": 192 57 | }, 58 | "flags": {}, 59 | "order": 8, 60 | "mode": 0, 61 | "inputs": [ 62 | { 63 | "name": "clip", 64 | "type": "CLIP", 65 | "link": 104 66 | } 67 | ], 68 | "outputs": [ 69 | { 70 | "name": "CONDITIONING", 71 | "type": "CONDITIONING", 72 | "links": [ 73 | 80 74 | ], 75 | "slot_index": 0, 76 | "shape": 3 77 | } 78 | ], 79 | "title": "Negative Prompt", 80 | "properties": { 81 | "Node name for S&R": "CLIPTextEncode" 82 | }, 83 | "widgets_values": [ 84 | "" 85 | ], 86 | "color": "#322", 87 | "bgcolor": "#533" 88 | }, 89 | { 90 | "id": 53, 91 | "type": "EmptySD3LatentImage", 92 | "pos": { 93 | "0": 480, 94 | "1": 576 95 | }, 96 | "size": { 97 | "0": 315, 98 | "1": 106 99 | }, 100 | "flags": {}, 101 | "order": 0, 102 | "mode": 0, 103 | "inputs": [], 104 | "outputs": [ 105 | { 106 | "name": "LATENT", 107 | "type": "LATENT", 108 | "links": [ 109 | 100 110 | ], 111 | "slot_index": 0, 112 | "shape": 3 113 | } 114 | ], 115 | "properties": { 116 | "Node name for S&R": "EmptySD3LatentImage" 117 | }, 118 | "widgets_values": [ 119 | 1024, 120 | 1024, 121 | 1 122 | ] 123 | }, 124 | { 125 | "id": 43, 126 | "type": "TripleCLIPLoader", 127 | "pos": { 128 | "0": -96, 129 | "1": 288 130 | }, 131 | "size": { 132 | "0": 315, 133 | "1": 106 134 | }, 135 | "flags": {}, 136 | "order": 1, 137 | "mode": 0, 138 | "inputs": [], 139 | "outputs": [ 140 | { 141 | "name": "CLIP", 142 | "type": "CLIP", 143 | "links": [ 144 | 103, 145 | 104 146 | ], 147 | "slot_index": 0, 148 | "shape": 3 149 | } 150 | ], 151 | "properties": { 152 | "Node name for S&R": "TripleCLIPLoader" 153 | }, 154 | "widgets_values": [ 155 | "clip_l.safetensors", 156 | "clip_g.safetensors", 157 | "t5xxl_fp16.safetensors" 158 | ] 159 | }, 160 | { 161 | "id": 51, 162 | "type": "Note", 163 | "pos": { 164 | "0": -96, 165 | "1": 624 166 | }, 167 | "size": { 168 | "0": 384, 169 | "1": 192 170 | }, 171 | "flags": {}, 172 | "order": 2, 173 | "mode": 0, 174 | "inputs": [], 175 | "outputs": [], 176 | "properties": { 177 | "text": "" 178 | }, 179 | "widgets_values": [ 180 | "sd3.5_large.safetensors and sd3.5_medium.safetensors are files that do not contain any CLIP/text encoder weights so you need to load them separately.\n\nThey go in the ComfyUI/models/checkpoints directory." 
181 | ], 182 | "color": "#432", 183 | "bgcolor": "#653" 184 | }, 185 | { 186 | "id": 41, 187 | "type": "CLIPLoader", 188 | "pos": { 189 | "0": -96, 190 | "1": 0 191 | }, 192 | "size": { 193 | "0": 315, 194 | "1": 82 195 | }, 196 | "flags": {}, 197 | "order": 3, 198 | "mode": 0, 199 | "inputs": [], 200 | "outputs": [ 201 | { 202 | "name": "CLIP", 203 | "type": "CLIP", 204 | "links": [], 205 | "slot_index": 0, 206 | "shape": 3 207 | } 208 | ], 209 | "properties": { 210 | "Node name for S&R": "CLIPLoader" 211 | }, 212 | "widgets_values": [ 213 | "t5xxl_fp8_e4m3fn.safetensors", 214 | "sd3" 215 | ] 216 | }, 217 | { 218 | "id": 42, 219 | "type": "DualCLIPLoader", 220 | "pos": { 221 | "0": -96, 222 | "1": 144 223 | }, 224 | "size": { 225 | "0": 315, 226 | "1": 106 227 | }, 228 | "flags": {}, 229 | "order": 4, 230 | "mode": 0, 231 | "inputs": [], 232 | "outputs": [ 233 | { 234 | "name": "CLIP", 235 | "type": "CLIP", 236 | "links": [], 237 | "slot_index": 0, 238 | "shape": 3 239 | } 240 | ], 241 | "properties": { 242 | "Node name for S&R": "DualCLIPLoader" 243 | }, 244 | "widgets_values": [ 245 | "clip_l.safetensors", 246 | "clip_g.safetensors", 247 | "sd3" 248 | ] 249 | }, 250 | { 251 | "id": 16, 252 | "type": "CLIPTextEncode", 253 | "pos": { 254 | "0": 384, 255 | "1": 96 256 | }, 257 | "size": { 258 | "0": 432, 259 | "1": 192 260 | }, 261 | "flags": {}, 262 | "order": 7, 263 | "mode": 0, 264 | "inputs": [ 265 | { 266 | "name": "clip", 267 | "type": "CLIP", 268 | "link": 103 269 | } 270 | ], 271 | "outputs": [ 272 | { 273 | "name": "CONDITIONING", 274 | "type": "CONDITIONING", 275 | "links": [ 276 | 21 277 | ], 278 | "slot_index": 0 279 | } 280 | ], 281 | "title": "Positive Prompt", 282 | "properties": { 283 | "Node name for S&R": "CLIPTextEncode" 284 | }, 285 | "widgets_values": [ 286 | "a photo of a cute cat holding a sign that says \"Tech Practice\"" 287 | ], 288 | "color": "#232", 289 | "bgcolor": "#353" 290 | }, 291 | { 292 | "id": 4, 293 | "type": "CheckpointLoaderSimple", 294 | "pos": { 295 | "0": -96, 296 | "1": 480 297 | }, 298 | "size": { 299 | "0": 384.75592041015625, 300 | "1": 98 301 | }, 302 | "flags": {}, 303 | "order": 5, 304 | "mode": 0, 305 | "inputs": [], 306 | "outputs": [ 307 | { 308 | "name": "MODEL", 309 | "type": "MODEL", 310 | "links": [ 311 | 99 312 | ], 313 | "slot_index": 0 314 | }, 315 | { 316 | "name": "CLIP", 317 | "type": "CLIP", 318 | "links": [], 319 | "slot_index": 1 320 | }, 321 | { 322 | "name": "VAE", 323 | "type": "VAE", 324 | "links": [ 325 | 53 326 | ], 327 | "slot_index": 2 328 | } 329 | ], 330 | "properties": { 331 | "Node name for S&R": "CheckpointLoaderSimple" 332 | }, 333 | "widgets_values": [ 334 | "sd3.5_large_turbo.safetensors" 335 | ] 336 | }, 337 | { 338 | "id": 3, 339 | "type": "KSampler", 340 | "pos": { 341 | "0": 864, 342 | "1": 96 343 | }, 344 | "size": { 345 | "0": 315, 346 | "1": 262 347 | }, 348 | "flags": {}, 349 | "order": 9, 350 | "mode": 0, 351 | "inputs": [ 352 | { 353 | "name": "model", 354 | "type": "MODEL", 355 | "link": 99, 356 | "slot_index": 0 357 | }, 358 | { 359 | "name": "positive", 360 | "type": "CONDITIONING", 361 | "link": 21 362 | }, 363 | { 364 | "name": "negative", 365 | "type": "CONDITIONING", 366 | "link": 80 367 | }, 368 | { 369 | "name": "latent_image", 370 | "type": "LATENT", 371 | "link": 100 372 | } 373 | ], 374 | "outputs": [ 375 | { 376 | "name": "LATENT", 377 | "type": "LATENT", 378 | "links": [ 379 | 7 380 | ], 381 | "slot_index": 0 382 | } 383 | ], 384 | "properties": { 385 | "Node name for S&R": "KSampler" 
386 | }, 387 | "widgets_values": [ 388 | 714772221606592, 389 | "increment", 390 | 4, 391 | 1.2, 392 | "euler", 393 | "sgm_uniform", 394 | 1 395 | ] 396 | }, 397 | { 398 | "id": 9, 399 | "type": "SaveImage", 400 | "pos": { 401 | "0": 835, 402 | "1": 516 403 | }, 404 | "size": [ 405 | 703.595850544591, 406 | 559.1004678134202 407 | ], 408 | "flags": {}, 409 | "order": 11, 410 | "mode": 0, 411 | "inputs": [ 412 | { 413 | "name": "images", 414 | "type": "IMAGE", 415 | "link": 51, 416 | "slot_index": 0 417 | } 418 | ], 419 | "outputs": [], 420 | "properties": {}, 421 | "widgets_values": [ 422 | "ComfyUI" 423 | ] 424 | }, 425 | { 426 | "id": 50, 427 | "type": "Note", 428 | "pos": { 429 | "0": -384, 430 | "1": 144 431 | }, 432 | "size": [ 433 | 234.0926108751708, 434 | 467.5384678134204 435 | ], 436 | "flags": {}, 437 | "order": 6, 438 | "mode": 0, 439 | "inputs": [], 440 | "outputs": [], 441 | "properties": { 442 | "text": "" 443 | }, 444 | "widgets_values": [ 445 | "SD3 supports different text encoder configurations, you can see how to load them here.\n\n\nMake sure to put these files:\nclip_g.safetensors\nclip_l.safetensors\nt5xxl_fp16.safetensors\n\n\nIn the ComfyUI/models/clip directory\n\n\nYoutube channel: https://www.youtube.com/@tech-practice9805\n\n" 446 | ], 447 | "color": "#432", 448 | "bgcolor": "#653" 449 | } 450 | ], 451 | "links": [ 452 | [ 453 | 7, 454 | 3, 455 | 0, 456 | 8, 457 | 0, 458 | "LATENT" 459 | ], 460 | [ 461 | 21, 462 | 16, 463 | 0, 464 | 3, 465 | 1, 466 | "CONDITIONING" 467 | ], 468 | [ 469 | 51, 470 | 8, 471 | 0, 472 | 9, 473 | 0, 474 | "IMAGE" 475 | ], 476 | [ 477 | 53, 478 | 4, 479 | 2, 480 | 8, 481 | 1, 482 | "VAE" 483 | ], 484 | [ 485 | 80, 486 | 40, 487 | 0, 488 | 3, 489 | 2, 490 | "CONDITIONING" 491 | ], 492 | [ 493 | 99, 494 | 4, 495 | 0, 496 | 3, 497 | 0, 498 | "MODEL" 499 | ], 500 | [ 501 | 100, 502 | 53, 503 | 0, 504 | 3, 505 | 3, 506 | "LATENT" 507 | ], 508 | [ 509 | 103, 510 | 43, 511 | 0, 512 | 16, 513 | 0, 514 | "CLIP" 515 | ], 516 | [ 517 | 104, 518 | 43, 519 | 0, 520 | 40, 521 | 0, 522 | "CLIP" 523 | ] 524 | ], 525 | "groups": [ 526 | { 527 | "title": "Different Text Encoder Configurations", 528 | "bounding": [ 529 | -144, 530 | -96, 531 | 480, 532 | 528 533 | ], 534 | "color": "#3f789e", 535 | "font_size": 24, 536 | "flags": {} 537 | } 538 | ], 539 | "config": {}, 540 | "extra": { 541 | "ds": { 542 | "scale": 1.1, 543 | "offset": [ 544 | 586.2710254884655, 545 | 150.27971400476133 546 | ] 547 | } 548 | }, 549 | "version": 0.4 550 | } -------------------------------------------------------------------------------- /SD35_Large_lora.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 55, 3 | "last_link_id": 110, 4 | "nodes": [ 5 | { 6 | "id": 42, 7 | "type": "DualCLIPLoader", 8 | "pos": [ 9 | -75.7979965209961, 10 | -47.91917419433594 11 | ], 12 | "size": [ 13 | 315, 14 | 106 15 | ], 16 | "flags": {}, 17 | "order": 0, 18 | "mode": 0, 19 | "inputs": [], 20 | "outputs": [ 21 | { 22 | "name": "CLIP", 23 | "type": "CLIP", 24 | "links": [], 25 | "slot_index": 0, 26 | "shape": 3 27 | } 28 | ], 29 | "properties": { 30 | "Node name for S&R": "DualCLIPLoader" 31 | }, 32 | "widgets_values": [ 33 | "clip_l.safetensors", 34 | "clip_g.safetensors", 35 | "sd3" 36 | ] 37 | }, 38 | { 39 | "id": 40, 40 | "type": "CLIPTextEncode", 41 | "pos": [ 42 | 404.2020568847656, 43 | 144.080810546875 44 | ], 45 | "size": [ 46 | 432, 47 | 192 48 | ], 49 | "flags": {}, 50 | "order": 8, 51 | "mode": 0, 52 | "inputs": [ 53 | 
{ 54 | "name": "clip", 55 | "type": "CLIP", 56 | "link": 110 57 | } 58 | ], 59 | "outputs": [ 60 | { 61 | "name": "CONDITIONING", 62 | "type": "CONDITIONING", 63 | "links": [ 64 | 80 65 | ], 66 | "slot_index": 0, 67 | "shape": 3 68 | } 69 | ], 70 | "title": "Negative Prompt", 71 | "properties": { 72 | "Node name for S&R": "CLIPTextEncode" 73 | }, 74 | "widgets_values": [ 75 | "", 76 | [ 77 | false, 78 | true 79 | ] 80 | ], 81 | "color": "#322", 82 | "bgcolor": "#533" 83 | }, 84 | { 85 | "id": 43, 86 | "type": "TripleCLIPLoader", 87 | "pos": [ 88 | -75.7979965209961, 89 | 96.08082580566406 90 | ], 91 | "size": [ 92 | 315, 93 | 106 94 | ], 95 | "flags": {}, 96 | "order": 1, 97 | "mode": 0, 98 | "inputs": [], 99 | "outputs": [ 100 | { 101 | "name": "CLIP", 102 | "type": "CLIP", 103 | "links": [ 104 | 109, 105 | 110 106 | ], 107 | "slot_index": 0, 108 | "shape": 3 109 | } 110 | ], 111 | "properties": { 112 | "Node name for S&R": "TripleCLIPLoader" 113 | }, 114 | "widgets_values": [ 115 | "clip_l.safetensors", 116 | "clip_g.safetensors", 117 | "t5xxl_fp8_e4m3fn.safetensors" 118 | ] 119 | }, 120 | { 121 | "id": 41, 122 | "type": "CLIPLoader", 123 | "pos": [ 124 | -75.7979965209961, 125 | -191.919189453125 126 | ], 127 | "size": [ 128 | 315, 129 | 82 130 | ], 131 | "flags": {}, 132 | "order": 2, 133 | "mode": 0, 134 | "inputs": [], 135 | "outputs": [ 136 | { 137 | "name": "CLIP", 138 | "type": "CLIP", 139 | "links": [], 140 | "slot_index": 0, 141 | "shape": 3 142 | } 143 | ], 144 | "properties": { 145 | "Node name for S&R": "CLIPLoader" 146 | }, 147 | "widgets_values": [ 148 | "t5xxl_fp8_e4m3fn.safetensors", 149 | "sd3" 150 | ] 151 | }, 152 | { 153 | "id": 4, 154 | "type": "CheckpointLoaderSimple", 155 | "pos": [ 156 | -115.6825180053711, 157 | 284.37066650390625 158 | ], 159 | "size": [ 160 | 384.75592041015625, 161 | 98 162 | ], 163 | "flags": {}, 164 | "order": 3, 165 | "mode": 0, 166 | "inputs": [], 167 | "outputs": [ 168 | { 169 | "name": "MODEL", 170 | "type": "MODEL", 171 | "links": [ 172 | 105 173 | ], 174 | "slot_index": 0 175 | }, 176 | { 177 | "name": "CLIP", 178 | "type": "CLIP", 179 | "links": [], 180 | "slot_index": 1 181 | }, 182 | { 183 | "name": "VAE", 184 | "type": "VAE", 185 | "links": [ 186 | 53 187 | ], 188 | "slot_index": 2 189 | } 190 | ], 191 | "properties": { 192 | "Node name for S&R": "CheckpointLoaderSimple" 193 | }, 194 | "widgets_values": [ 195 | "sd3.5_large.safetensors" 196 | ] 197 | }, 198 | { 199 | "id": 54, 200 | "type": "LoraLoaderModelOnly", 201 | "pos": [ 202 | 417.9397277832031, 203 | 387.0163269042969 204 | ], 205 | "size": [ 206 | 315, 207 | 82 208 | ], 209 | "flags": {}, 210 | "order": 9, 211 | "mode": 0, 212 | "inputs": [ 213 | { 214 | "name": "model", 215 | "type": "MODEL", 216 | "link": 105 217 | } 218 | ], 219 | "outputs": [ 220 | { 221 | "name": "MODEL", 222 | "type": "MODEL", 223 | "links": [ 224 | 106 225 | ], 226 | "slot_index": 0 227 | } 228 | ], 229 | "properties": { 230 | "Node name for S&R": "LoraLoaderModelOnly" 231 | }, 232 | "widgets_values": [ 233 | "sketch_shouxin.safetensors", 234 | 1 235 | ] 236 | }, 237 | { 238 | "id": 3, 239 | "type": "KSampler", 240 | "pos": [ 241 | 884.2014770507812, 242 | -242.47186279296875 243 | ], 244 | "size": [ 245 | 315, 246 | 262 247 | ], 248 | "flags": {}, 249 | "order": 10, 250 | "mode": 0, 251 | "inputs": [ 252 | { 253 | "name": "model", 254 | "type": "MODEL", 255 | "link": 106, 256 | "slot_index": 0 257 | }, 258 | { 259 | "name": "positive", 260 | "type": "CONDITIONING", 261 | "link": 21 262 | }, 263 | { 
264 | "name": "negative", 265 | "type": "CONDITIONING", 266 | "link": 80 267 | }, 268 | { 269 | "name": "latent_image", 270 | "type": "LATENT", 271 | "link": 100 272 | } 273 | ], 274 | "outputs": [ 275 | { 276 | "name": "LATENT", 277 | "type": "LATENT", 278 | "links": [ 279 | 7 280 | ], 281 | "slot_index": 0 282 | } 283 | ], 284 | "properties": { 285 | "Node name for S&R": "KSampler" 286 | }, 287 | "widgets_values": [ 288 | 79700635096826, 289 | "randomize", 290 | 20, 291 | 5.45, 292 | "euler", 293 | "sgm_uniform", 294 | 1 295 | ] 296 | }, 297 | { 298 | "id": 8, 299 | "type": "VAEDecode", 300 | "pos": [ 301 | 1229.47705078125, 302 | -235.051513671875 303 | ], 304 | "size": [ 305 | 210, 306 | 46 307 | ], 308 | "flags": {}, 309 | "order": 11, 310 | "mode": 0, 311 | "inputs": [ 312 | { 313 | "name": "samples", 314 | "type": "LATENT", 315 | "link": 7 316 | }, 317 | { 318 | "name": "vae", 319 | "type": "VAE", 320 | "link": 53, 321 | "slot_index": 1 322 | } 323 | ], 324 | "outputs": [ 325 | { 326 | "name": "IMAGE", 327 | "type": "IMAGE", 328 | "links": [ 329 | 51 330 | ], 331 | "slot_index": 0 332 | } 333 | ], 334 | "properties": { 335 | "Node name for S&R": "VAEDecode" 336 | }, 337 | "widgets_values": [] 338 | }, 339 | { 340 | "id": 53, 341 | "type": "EmptySD3LatentImage", 342 | "pos": [ 343 | 888.3394165039062, 344 | 94.41972351074219 345 | ], 346 | "size": [ 347 | 315, 348 | 106 349 | ], 350 | "flags": {}, 351 | "order": 4, 352 | "mode": 0, 353 | "inputs": [], 354 | "outputs": [ 355 | { 356 | "name": "LATENT", 357 | "type": "LATENT", 358 | "links": [ 359 | 100 360 | ], 361 | "slot_index": 0, 362 | "shape": 3 363 | } 364 | ], 365 | "properties": { 366 | "Node name for S&R": "EmptySD3LatentImage" 367 | }, 368 | "widgets_values": [ 369 | 1024, 370 | 1024, 371 | 1 372 | ] 373 | }, 374 | { 375 | "id": 50, 376 | "type": "Note", 377 | "pos": [ 378 | -368.8996276855469, 379 | -245.85809326171875 380 | ], 381 | "size": [ 382 | 223.34756469726562, 383 | 254.37765502929688 384 | ], 385 | "flags": {}, 386 | "order": 5, 387 | "mode": 0, 388 | "inputs": [], 389 | "outputs": [], 390 | "properties": { 391 | "text": "" 392 | }, 393 | "widgets_values": [ 394 | "SD3 supports different text encoder configurations, you can see how to load them here.\n\n\nMake sure to put these files:\nclip_g.safetensors\nclip_l.safetensors\nt5xxl_fp16.safetensors\n\n\nIn the ComfyUI/models/clip directory" 395 | ], 396 | "color": "#432", 397 | "bgcolor": "#653" 398 | }, 399 | { 400 | "id": 9, 401 | "type": "SaveImage", 402 | "pos": [ 403 | 1285.98681640625, 404 | -43.66995620727539 405 | ], 406 | "size": [ 407 | 652.2518310546875, 408 | 721.1181030273438 409 | ], 410 | "flags": {}, 411 | "order": 12, 412 | "mode": 0, 413 | "inputs": [ 414 | { 415 | "name": "images", 416 | "type": "IMAGE", 417 | "link": 51, 418 | "slot_index": 0 419 | } 420 | ], 421 | "outputs": [], 422 | "properties": { 423 | "Node name for S&R": "SaveImage" 424 | }, 425 | "widgets_values": [ 426 | "ComfyUI" 427 | ] 428 | }, 429 | { 430 | "id": 16, 431 | "type": "CLIPTextEncode", 432 | "pos": [ 433 | 394.9264831542969, 434 | -200.73214721679688 435 | ], 436 | "size": [ 437 | 432, 438 | 192 439 | ], 440 | "flags": {}, 441 | "order": 7, 442 | "mode": 0, 443 | "inputs": [ 444 | { 445 | "name": "clip", 446 | "type": "CLIP", 447 | "link": 109 448 | } 449 | ], 450 | "outputs": [ 451 | { 452 | "name": "CONDITIONING", 453 | "type": "CONDITIONING", 454 | "links": [ 455 | 21 456 | ], 457 | "slot_index": 0 458 | } 459 | ], 460 | "title": "Positive Prompt", 461 | 
"properties": { 462 | "Node name for S&R": "CLIPTextEncode" 463 | }, 464 | "widgets_values": [ 465 | "shouxin, pencil sketch of a rabbit", 466 | [ 467 | false, 468 | true 469 | ] 470 | ], 471 | "color": "#232", 472 | "bgcolor": "#353" 473 | }, 474 | { 475 | "id": 55, 476 | "type": "Note", 477 | "pos": [ 478 | -835.9465942382812, 479 | -320.48406982421875 480 | ], 481 | "size": [ 482 | 356.666748046875, 483 | 171.5010223388672 484 | ], 485 | "flags": {}, 486 | "order": 6, 487 | "mode": 0, 488 | "inputs": [], 489 | "outputs": [], 490 | "properties": {}, 491 | "widgets_values": [ 492 | "\nAuthor: Tech Practice. Youtube channel: https://www.youtube.com/@tech-practice9805\n\n\nDownload the lora model from https://huggingface.co/Ttio2/sketch_shouxin and put it to the models/loras directory. " 493 | ], 494 | "color": "#432", 495 | "bgcolor": "#653" 496 | } 497 | ], 498 | "links": [ 499 | [ 500 | 7, 501 | 3, 502 | 0, 503 | 8, 504 | 0, 505 | "LATENT" 506 | ], 507 | [ 508 | 21, 509 | 16, 510 | 0, 511 | 3, 512 | 1, 513 | "CONDITIONING" 514 | ], 515 | [ 516 | 51, 517 | 8, 518 | 0, 519 | 9, 520 | 0, 521 | "IMAGE" 522 | ], 523 | [ 524 | 53, 525 | 4, 526 | 2, 527 | 8, 528 | 1, 529 | "VAE" 530 | ], 531 | [ 532 | 80, 533 | 40, 534 | 0, 535 | 3, 536 | 2, 537 | "CONDITIONING" 538 | ], 539 | [ 540 | 100, 541 | 53, 542 | 0, 543 | 3, 544 | 3, 545 | "LATENT" 546 | ], 547 | [ 548 | 105, 549 | 4, 550 | 0, 551 | 54, 552 | 0, 553 | "MODEL" 554 | ], 555 | [ 556 | 106, 557 | 54, 558 | 0, 559 | 3, 560 | 0, 561 | "MODEL" 562 | ], 563 | [ 564 | 109, 565 | 43, 566 | 0, 567 | 16, 568 | 0, 569 | "CLIP" 570 | ], 571 | [ 572 | 110, 573 | 43, 574 | 0, 575 | 40, 576 | 0, 577 | "CLIP" 578 | ] 579 | ], 580 | "groups": [ 581 | { 582 | "id": 2, 583 | "title": "SD3.5 Large Lora", 584 | "bounding": [ 585 | -465.07427978515625, 586 | -400.086669921875, 587 | 2853.901123046875, 588 | 1218.1260986328125 589 | ], 590 | "color": "#8AA", 591 | "font_size": 24, 592 | "flags": {} 593 | }, 594 | { 595 | "id": 1, 596 | "title": "Different Text Encoder Configurations", 597 | "bounding": [ 598 | -123.79796600341797, 599 | -287.91912841796875, 600 | 480, 601 | 528 602 | ], 603 | "color": "#3f789e", 604 | "font_size": 24, 605 | "flags": {} 606 | } 607 | ], 608 | "config": {}, 609 | "extra": { 610 | "ds": { 611 | "scale": 0.45, 612 | "offset": [ 613 | 1431.6872666598363, 614 | 801.4002228620454 615 | ] 616 | }, 617 | "ue_links": [] 618 | }, 619 | "version": 0.4 620 | } -------------------------------------------------------------------------------- /controlnet_workflows_example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ttio2tech/ComfyUI_workflows_collection/7ba66522ad516317a435b2fd6bacc9a256dcf7a3/controlnet_workflows_example.jpg -------------------------------------------------------------------------------- /flux1-dev-bnb-nf4-workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 34, 3 | "last_link_id": 73, 4 | "nodes": [ 5 | { 6 | "id": 6, 7 | "type": "CLIPTextEncode", 8 | "pos": [ 9 | 332, 10 | 225 11 | ], 12 | "size": { 13 | "0": 422.84503173828125, 14 | "1": 164.31304931640625 15 | }, 16 | "flags": {}, 17 | "order": 5, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "clip", 22 | "type": "CLIP", 23 | "link": 72 24 | } 25 | ], 26 | "outputs": [ 27 | { 28 | "name": "CONDITIONING", 29 | "type": "CONDITIONING", 30 | "links": [ 31 | 40 32 | ], 33 | "slot_index": 0 34 | } 35 | ], 36 | "properties": { 
37 | "Node name for S&R": "CLIPTextEncode" 38 | }, 39 | "widgets_values": [ 40 | "girl with green hat, summer, city street, a sign saying \"Tech-Practice\"" 41 | ] 42 | }, 43 | { 44 | "id": 8, 45 | "type": "VAEDecode", 46 | "pos": [ 47 | 1256, 48 | 191 49 | ], 50 | "size": { 51 | "0": 140, 52 | "1": 46 53 | }, 54 | "flags": {}, 55 | "order": 8, 56 | "mode": 0, 57 | "inputs": [ 58 | { 59 | "name": "samples", 60 | "type": "LATENT", 61 | "link": 55 62 | }, 63 | { 64 | "name": "vae", 65 | "type": "VAE", 66 | "link": 73 67 | } 68 | ], 69 | "outputs": [ 70 | { 71 | "name": "IMAGE", 72 | "type": "IMAGE", 73 | "links": [ 74 | 9 75 | ], 76 | "slot_index": 0 77 | } 78 | ], 79 | "properties": { 80 | "Node name for S&R": "VAEDecode" 81 | } 82 | }, 83 | { 84 | "id": 9, 85 | "type": "SaveImage", 86 | "pos": [ 87 | 1420, 88 | 163 89 | ], 90 | "size": { 91 | "0": 587.9147338867188, 92 | "1": 615.1401977539062 93 | }, 94 | "flags": {}, 95 | "order": 9, 96 | "mode": 0, 97 | "inputs": [ 98 | { 99 | "name": "images", 100 | "type": "IMAGE", 101 | "link": 9 102 | } 103 | ], 104 | "properties": {}, 105 | "widgets_values": [ 106 | "ComfyUI" 107 | ] 108 | }, 109 | { 110 | "id": 13, 111 | "type": "SamplerCustomAdvanced", 112 | "pos": [ 113 | 842, 114 | 215 115 | ], 116 | "size": { 117 | "0": 355.20001220703125, 118 | "1": 106 119 | }, 120 | "flags": {}, 121 | "order": 7, 122 | "mode": 0, 123 | "inputs": [ 124 | { 125 | "name": "noise", 126 | "type": "NOISE", 127 | "link": 37, 128 | "slot_index": 0 129 | }, 130 | { 131 | "name": "guider", 132 | "type": "GUIDER", 133 | "link": 52, 134 | "slot_index": 1 135 | }, 136 | { 137 | "name": "sampler", 138 | "type": "SAMPLER", 139 | "link": 19, 140 | "slot_index": 2 141 | }, 142 | { 143 | "name": "sigmas", 144 | "type": "SIGMAS", 145 | "link": 20, 146 | "slot_index": 3 147 | }, 148 | { 149 | "name": "latent_image", 150 | "type": "LATENT", 151 | "link": 54, 152 | "slot_index": 4 153 | } 154 | ], 155 | "outputs": [ 156 | { 157 | "name": "output", 158 | "type": "LATENT", 159 | "links": [ 160 | 55 161 | ], 162 | "slot_index": 0, 163 | "shape": 3 164 | }, 165 | { 166 | "name": "denoised_output", 167 | "type": "LATENT", 168 | "links": [], 169 | "slot_index": 1, 170 | "shape": 3 171 | } 172 | ], 173 | "properties": { 174 | "Node name for S&R": "SamplerCustomAdvanced" 175 | } 176 | }, 177 | { 178 | "id": 16, 179 | "type": "KSamplerSelect", 180 | "pos": [ 181 | 464, 182 | 796 183 | ], 184 | "size": { 185 | "0": 315, 186 | "1": 58 187 | }, 188 | "flags": {}, 189 | "order": 0, 190 | "mode": 0, 191 | "outputs": [ 192 | { 193 | "name": "SAMPLER", 194 | "type": "SAMPLER", 195 | "links": [ 196 | 19 197 | ], 198 | "shape": 3 199 | } 200 | ], 201 | "properties": { 202 | "Node name for S&R": "KSamplerSelect" 203 | }, 204 | "widgets_values": [ 205 | "dpmpp_2m" 206 | ] 207 | }, 208 | { 209 | "id": 17, 210 | "type": "BasicScheduler", 211 | "pos": [ 212 | 461, 213 | 914 214 | ], 215 | "size": { 216 | "0": 315, 217 | "1": 106 218 | }, 219 | "flags": {}, 220 | "order": 4, 221 | "mode": 0, 222 | "inputs": [ 223 | { 224 | "name": "model", 225 | "type": "MODEL", 226 | "link": 60, 227 | "slot_index": 0 228 | } 229 | ], 230 | "outputs": [ 231 | { 232 | "name": "SIGMAS", 233 | "type": "SIGMAS", 234 | "links": [ 235 | 20 236 | ], 237 | "shape": 3 238 | } 239 | ], 240 | "properties": { 241 | "Node name for S&R": "BasicScheduler" 242 | }, 243 | "widgets_values": [ 244 | "simple", 245 | 20, 246 | 1 247 | ] 248 | }, 249 | { 250 | "id": 22, 251 | "type": "BasicGuider", 252 | "pos": [ 253 | 463, 254 | 96 255 | 
], 256 | "size": { 257 | "0": 241.79998779296875, 258 | "1": 46 259 | }, 260 | "flags": {}, 261 | "order": 6, 262 | "mode": 0, 263 | "inputs": [ 264 | { 265 | "name": "model", 266 | "type": "MODEL", 267 | "link": 59, 268 | "slot_index": 0 269 | }, 270 | { 271 | "name": "conditioning", 272 | "type": "CONDITIONING", 273 | "link": 40, 274 | "slot_index": 1 275 | } 276 | ], 277 | "outputs": [ 278 | { 279 | "name": "GUIDER", 280 | "type": "GUIDER", 281 | "links": [ 282 | 52 283 | ], 284 | "slot_index": 0, 285 | "shape": 3 286 | } 287 | ], 288 | "properties": { 289 | "Node name for S&R": "BasicGuider" 290 | } 291 | }, 292 | { 293 | "id": 25, 294 | "type": "RandomNoise", 295 | "pos": [ 296 | 464, 297 | 649 298 | ], 299 | "size": { 300 | "0": 315, 301 | "1": 82 302 | }, 303 | "flags": {}, 304 | "order": 1, 305 | "mode": 0, 306 | "outputs": [ 307 | { 308 | "name": "NOISE", 309 | "type": "NOISE", 310 | "links": [ 311 | 37 312 | ], 313 | "shape": 3 314 | } 315 | ], 316 | "properties": { 317 | "Node name for S&R": "RandomNoise" 318 | }, 319 | "widgets_values": [ 320 | 375980420730706, 321 | "increment" 322 | ] 323 | }, 324 | { 325 | "id": 29, 326 | "type": "EmptySD3LatentImage", 327 | "pos": [ 328 | 470, 329 | 481 330 | ], 331 | "size": { 332 | "0": 304.2161865234375, 333 | "1": 106 334 | }, 335 | "flags": {}, 336 | "order": 2, 337 | "mode": 0, 338 | "outputs": [ 339 | { 340 | "name": "LATENT", 341 | "type": "LATENT", 342 | "links": [ 343 | 54 344 | ], 345 | "slot_index": 0, 346 | "shape": 3 347 | } 348 | ], 349 | "properties": { 350 | "Node name for S&R": "EmptySD3LatentImage" 351 | }, 352 | "widgets_values": [ 353 | 1024, 354 | 1024, 355 | 1 356 | ] 357 | }, 358 | { 359 | "id": 31, 360 | "type": "CheckpointLoaderNF4", 361 | "pos": [ 362 | -96, 363 | 512 364 | ], 365 | "size": { 366 | "0": 397.76641845703125, 367 | "1": 98 368 | }, 369 | "flags": {}, 370 | "order": 3, 371 | "mode": 0, 372 | "outputs": [ 373 | { 374 | "name": "MODEL", 375 | "type": "MODEL", 376 | "links": [ 377 | 59, 378 | 60 379 | ], 380 | "slot_index": 0, 381 | "shape": 3 382 | }, 383 | { 384 | "name": "CLIP", 385 | "type": "CLIP", 386 | "links": [ 387 | 72 388 | ], 389 | "slot_index": 1, 390 | "shape": 3 391 | }, 392 | { 393 | "name": "VAE", 394 | "type": "VAE", 395 | "links": [ 396 | 73 397 | ], 398 | "slot_index": 2, 399 | "shape": 3 400 | } 401 | ], 402 | "properties": { 403 | "Node name for S&R": "CheckpointLoaderNF4" 404 | }, 405 | "widgets_values": [ 406 | "flux1-dev-bnb-nf4-v2.safetensors" 407 | ] 408 | } 409 | ], 410 | "links": [ 411 | [ 412 | 9, 413 | 8, 414 | 0, 415 | 9, 416 | 0, 417 | "IMAGE" 418 | ], 419 | [ 420 | 19, 421 | 16, 422 | 0, 423 | 13, 424 | 2, 425 | "SAMPLER" 426 | ], 427 | [ 428 | 20, 429 | 17, 430 | 0, 431 | 13, 432 | 3, 433 | "SIGMAS" 434 | ], 435 | [ 436 | 37, 437 | 25, 438 | 0, 439 | 13, 440 | 0, 441 | "NOISE" 442 | ], 443 | [ 444 | 40, 445 | 6, 446 | 0, 447 | 22, 448 | 1, 449 | "CONDITIONING" 450 | ], 451 | [ 452 | 52, 453 | 22, 454 | 0, 455 | 13, 456 | 1, 457 | "GUIDER" 458 | ], 459 | [ 460 | 54, 461 | 29, 462 | 0, 463 | 13, 464 | 4, 465 | "LATENT" 466 | ], 467 | [ 468 | 55, 469 | 13, 470 | 0, 471 | 8, 472 | 0, 473 | "LATENT" 474 | ], 475 | [ 476 | 59, 477 | 31, 478 | 0, 479 | 22, 480 | 0, 481 | "MODEL" 482 | ], 483 | [ 484 | 60, 485 | 31, 486 | 0, 487 | 17, 488 | 0, 489 | "MODEL" 490 | ], 491 | [ 492 | 72, 493 | 31, 494 | 1, 495 | 6, 496 | 0, 497 | "CLIP" 498 | ], 499 | [ 500 | 73, 501 | 31, 502 | 2, 503 | 8, 504 | 1, 505 | "VAE" 506 | ] 507 | ], 508 | "groups": [], 509 | "config": {}, 510 | "extra": 
{ 511 | "ds": { 512 | "scale": 1, 513 | "offset": [ 514 | 250.82895432583723, 515 | -59.94610136753573 516 | ] 517 | } 518 | }, 519 | "version": 0.4 520 | } -------------------------------------------------------------------------------- /flux1-dev-gguf-Q4-workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 34, 3 | "last_link_id": 47, 4 | "nodes": [ 5 | { 6 | "id": 5, 7 | "type": "EmptyLatentImage", 8 | "pos": [ 9 | 480, 10 | 432 11 | ], 12 | "size": { 13 | "0": 315, 14 | "1": 106 15 | }, 16 | "flags": {}, 17 | "order": 0, 18 | "mode": 0, 19 | "outputs": [ 20 | { 21 | "name": "LATENT", 22 | "type": "LATENT", 23 | "links": [ 24 | 45 25 | ], 26 | "slot_index": 0 27 | } 28 | ], 29 | "properties": { 30 | "Node name for S&R": "EmptyLatentImage" 31 | }, 32 | "widgets_values": [ 33 | 1024, 34 | 1024, 35 | 1 36 | ], 37 | "color": "#323", 38 | "bgcolor": "#535" 39 | }, 40 | { 41 | "id": 6, 42 | "type": "CLIPTextEncode", 43 | "pos": [ 44 | 379, 45 | 208 46 | ], 47 | "size": { 48 | "0": 422.84503173828125, 49 | "1": 164.31304931640625 50 | }, 51 | "flags": {}, 52 | "order": 5, 53 | "mode": 0, 54 | "inputs": [ 55 | { 56 | "name": "clip", 57 | "type": "CLIP", 58 | "link": 10 59 | } 60 | ], 61 | "outputs": [ 62 | { 63 | "name": "CONDITIONING", 64 | "type": "CONDITIONING", 65 | "links": [ 66 | 43 67 | ], 68 | "slot_index": 0 69 | } 70 | ], 71 | "properties": { 72 | "Node name for S&R": "CLIPTextEncode" 73 | }, 74 | "widgets_values": [ 75 | "a cat wearing a yellow hat, hold a sign saying \"tech-practice\"\n\n\n" 76 | ], 77 | "color": "#232", 78 | "bgcolor": "#353" 79 | }, 80 | { 81 | "id": 8, 82 | "type": "VAEDecode", 83 | "pos": [ 84 | 1248, 85 | 192 86 | ], 87 | "size": { 88 | "0": 210, 89 | "1": 46 90 | }, 91 | "flags": {}, 92 | "order": 8, 93 | "mode": 0, 94 | "inputs": [ 95 | { 96 | "name": "samples", 97 | "type": "LATENT", 98 | "link": 44 99 | }, 100 | { 101 | "name": "vae", 102 | "type": "VAE", 103 | "link": 12 104 | } 105 | ], 106 | "outputs": [ 107 | { 108 | "name": "IMAGE", 109 | "type": "IMAGE", 110 | "links": [ 111 | 9 112 | ], 113 | "slot_index": 0 114 | } 115 | ], 116 | "properties": { 117 | "Node name for S&R": "VAEDecode" 118 | } 119 | }, 120 | { 121 | "id": 9, 122 | "type": "SaveImage", 123 | "pos": [ 124 | 1354, 125 | 336 126 | ], 127 | "size": { 128 | "0": 534.0767822265625, 129 | "1": 607.572265625 130 | }, 131 | "flags": {}, 132 | "order": 9, 133 | "mode": 0, 134 | "inputs": [ 135 | { 136 | "name": "images", 137 | "type": "IMAGE", 138 | "link": 9 139 | } 140 | ], 141 | "properties": {}, 142 | "widgets_values": [ 143 | "ComfyUI" 144 | ] 145 | }, 146 | { 147 | "id": 10, 148 | "type": "VAELoader", 149 | "pos": [ 150 | 38, 151 | 363 152 | ], 153 | "size": { 154 | "0": 315, 155 | "1": 58 156 | }, 157 | "flags": {}, 158 | "order": 1, 159 | "mode": 0, 160 | "outputs": [ 161 | { 162 | "name": "VAE", 163 | "type": "VAE", 164 | "links": [ 165 | 12 166 | ], 167 | "slot_index": 0, 168 | "shape": 3 169 | } 170 | ], 171 | "properties": { 172 | "Node name for S&R": "VAELoader" 173 | }, 174 | "widgets_values": [ 175 | "ae.safetensors" 176 | ] 177 | }, 178 | { 179 | "id": 11, 180 | "type": "DualCLIPLoader", 181 | "pos": [ 182 | 34, 183 | 135 184 | ], 185 | "size": { 186 | "0": 315, 187 | "1": 106 188 | }, 189 | "flags": {}, 190 | "order": 2, 191 | "mode": 0, 192 | "outputs": [ 193 | { 194 | "name": "CLIP", 195 | "type": "CLIP", 196 | "links": [ 197 | 10, 198 | 47 199 | ], 200 | "slot_index": 0, 201 | "shape": 3 202 | } 203 | ], 204 
| "properties": { 205 | "Node name for S&R": "DualCLIPLoader" 206 | }, 207 | "widgets_values": [ 208 | "t5xxl_fp8_e4m3fn.safetensors", 209 | "clip_l.safetensors", 210 | "flux" 211 | ] 212 | }, 213 | { 214 | "id": 26, 215 | "type": "Note", 216 | "pos": [ 217 | 48, 218 | 480 219 | ], 220 | "size": { 221 | "0": 336, 222 | "1": 288 223 | }, 224 | "flags": {}, 225 | "order": 3, 226 | "mode": 0, 227 | "properties": { 228 | "text": "" 229 | }, 230 | "widgets_values": [ 231 | "If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-schnell.safetensors goes in: ComfyUI/models/unet/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/clip/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues." 232 | ], 233 | "color": "#432", 234 | "bgcolor": "#653" 235 | }, 236 | { 237 | "id": 28, 238 | "type": "KSampler", 239 | "pos": [ 240 | 893, 241 | 368 242 | ], 243 | "size": { 244 | "0": 388.91705322265625, 245 | "1": 510.07452392578125 246 | }, 247 | "flags": {}, 248 | "order": 7, 249 | "mode": 0, 250 | "inputs": [ 251 | { 252 | "name": "model", 253 | "type": "MODEL", 254 | "link": 42 255 | }, 256 | { 257 | "name": "positive", 258 | "type": "CONDITIONING", 259 | "link": 43 260 | }, 261 | { 262 | "name": "negative", 263 | "type": "CONDITIONING", 264 | "link": 46 265 | }, 266 | { 267 | "name": "latent_image", 268 | "type": "LATENT", 269 | "link": 45 270 | } 271 | ], 272 | "outputs": [ 273 | { 274 | "name": "LATENT", 275 | "type": "LATENT", 276 | "links": [ 277 | 44 278 | ], 279 | "slot_index": 0, 280 | "shape": 3 281 | } 282 | ], 283 | "properties": { 284 | "Node name for S&R": "KSampler" 285 | }, 286 | "widgets_values": [ 287 | 17, 288 | "increment", 289 | 20, 290 | 1, 291 | "euler", 292 | "normal", 293 | 1 294 | ] 295 | }, 296 | { 297 | "id": 33, 298 | "type": "UnetLoaderGGUF", 299 | "pos": [ 300 | 427, 301 | 48 302 | ], 303 | "size": { 304 | "0": 315, 305 | "1": 58 306 | }, 307 | "flags": {}, 308 | "order": 4, 309 | "mode": 0, 310 | "outputs": [ 311 | { 312 | "name": "MODEL", 313 | "type": "MODEL", 314 | "links": [ 315 | 42 316 | ], 317 | "slot_index": 0, 318 | "shape": 3 319 | } 320 | ], 321 | "properties": { 322 | "Node name for S&R": "UnetLoaderGGUF" 323 | }, 324 | "widgets_values": [ 325 | "flux1-dev-Q4_0.gguf" 326 | ] 327 | }, 328 | { 329 | "id": 34, 330 | "type": "CLIPTextEncode", 331 | "pos": [ 332 | 512, 333 | 596 334 | ], 335 | "size": { 336 | "0": 422.84503173828125, 337 | "1": 164.31304931640625 338 | }, 339 | "flags": { 340 | "collapsed": true 341 | }, 342 | "order": 6, 343 | "mode": 0, 344 | "inputs": [ 345 | { 346 | "name": "clip", 347 | "type": "CLIP", 348 | "link": 47, 349 | "slot_index": 0 350 | } 351 | ], 352 | "outputs": [ 353 | { 354 | "name": "CONDITIONING", 355 | "type": "CONDITIONING", 356 | "links": [ 357 | 46 358 | ], 359 | "slot_index": 0 360 | } 361 | ], 362 | "title": "CLIP Text Encode (Negative Prompt)", 363 | "properties": { 364 | "Node name for S&R": "CLIPTextEncode" 365 | }, 366 | "widgets_values": [ 367 | "" 368 | ], 369 | "color": "#322", 370 | "bgcolor": "#533" 371 | } 372 | ], 373 | "links": [ 374 | [ 375 | 9, 376 | 8, 377 | 0, 378 | 9, 379 | 0, 380 | "IMAGE" 381 | ], 382 | [ 383 | 10, 384 | 11, 385 | 0, 386 | 6, 387 | 0, 388 | "CLIP" 389 | ], 390 | [ 391 | 12, 392 | 10, 393 | 0, 394 | 8, 395 | 1, 396 | "VAE" 
397 | ], 398 | [ 399 | 42, 400 | 33, 401 | 0, 402 | 28, 403 | 0, 404 | "MODEL" 405 | ], 406 | [ 407 | 43, 408 | 6, 409 | 0, 410 | 28, 411 | 1, 412 | "CONDITIONING" 413 | ], 414 | [ 415 | 44, 416 | 28, 417 | 0, 418 | 8, 419 | 0, 420 | "LATENT" 421 | ], 422 | [ 423 | 45, 424 | 5, 425 | 0, 426 | 28, 427 | 3, 428 | "LATENT" 429 | ], 430 | [ 431 | 46, 432 | 34, 433 | 0, 434 | 28, 435 | 2, 436 | "CONDITIONING" 437 | ], 438 | [ 439 | 47, 440 | 11, 441 | 0, 442 | 34, 443 | 0, 444 | "CLIP" 445 | ] 446 | ], 447 | "groups": [], 448 | "config": {}, 449 | "extra": { 450 | "ds": { 451 | "scale": 0.751314800901578, 452 | "offset": [ 453 | 269.56340228245523, 454 | 222.99276164256668 455 | ] 456 | } 457 | }, 458 | "version": 0.4 459 | } -------------------------------------------------------------------------------- /flux1-schnell-bnb-nf4-workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 34, 3 | "last_link_id": 73, 4 | "nodes": [ 5 | { 6 | "id": 6, 7 | "type": "CLIPTextEncode", 8 | "pos": [ 9 | 325, 10 | 246 11 | ], 12 | "size": { 13 | "0": 422.84503173828125, 14 | "1": 164.31304931640625 15 | }, 16 | "flags": {}, 17 | "order": 5, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "clip", 22 | "type": "CLIP", 23 | "link": 72 24 | } 25 | ], 26 | "outputs": [ 27 | { 28 | "name": "CONDITIONING", 29 | "type": "CONDITIONING", 30 | "links": [ 31 | 40 32 | ], 33 | "slot_index": 0 34 | } 35 | ], 36 | "properties": { 37 | "Node name for S&R": "CLIPTextEncode" 38 | }, 39 | "widgets_values": [ 40 | "girl with green hat, summer, city street, a sign saying \"Tech-Practice\"" 41 | ] 42 | }, 43 | { 44 | "id": 8, 45 | "type": "VAEDecode", 46 | "pos": [ 47 | 903, 48 | 407 49 | ], 50 | "size": { 51 | "0": 140, 52 | "1": 46 53 | }, 54 | "flags": {}, 55 | "order": 8, 56 | "mode": 0, 57 | "inputs": [ 58 | { 59 | "name": "samples", 60 | "type": "LATENT", 61 | "link": 55 62 | }, 63 | { 64 | "name": "vae", 65 | "type": "VAE", 66 | "link": 73 67 | } 68 | ], 69 | "outputs": [ 70 | { 71 | "name": "IMAGE", 72 | "type": "IMAGE", 73 | "links": [ 74 | 9 75 | ], 76 | "slot_index": 0 77 | } 78 | ], 79 | "properties": { 80 | "Node name for S&R": "VAEDecode" 81 | } 82 | }, 83 | { 84 | "id": 9, 85 | "type": "SaveImage", 86 | "pos": [ 87 | 863, 88 | 618 89 | ], 90 | "size": { 91 | "0": 475.06707763671875, 92 | "1": 435.2092590332031 93 | }, 94 | "flags": {}, 95 | "order": 9, 96 | "mode": 0, 97 | "inputs": [ 98 | { 99 | "name": "images", 100 | "type": "IMAGE", 101 | "link": 9 102 | } 103 | ], 104 | "properties": {}, 105 | "widgets_values": [ 106 | "ComfyUI" 107 | ] 108 | }, 109 | { 110 | "id": 13, 111 | "type": "SamplerCustomAdvanced", 112 | "pos": [ 113 | 842, 114 | 215 115 | ], 116 | "size": { 117 | "0": 355.20001220703125, 118 | "1": 106 119 | }, 120 | "flags": {}, 121 | "order": 7, 122 | "mode": 0, 123 | "inputs": [ 124 | { 125 | "name": "noise", 126 | "type": "NOISE", 127 | "link": 37, 128 | "slot_index": 0 129 | }, 130 | { 131 | "name": "guider", 132 | "type": "GUIDER", 133 | "link": 52, 134 | "slot_index": 1 135 | }, 136 | { 137 | "name": "sampler", 138 | "type": "SAMPLER", 139 | "link": 19, 140 | "slot_index": 2 141 | }, 142 | { 143 | "name": "sigmas", 144 | "type": "SIGMAS", 145 | "link": 20, 146 | "slot_index": 3 147 | }, 148 | { 149 | "name": "latent_image", 150 | "type": "LATENT", 151 | "link": 54, 152 | "slot_index": 4 153 | } 154 | ], 155 | "outputs": [ 156 | { 157 | "name": "output", 158 | "type": "LATENT", 159 | "links": [ 160 | 55 161 | ], 162 | 
"slot_index": 0, 163 | "shape": 3 164 | }, 165 | { 166 | "name": "denoised_output", 167 | "type": "LATENT", 168 | "links": [], 169 | "slot_index": 1, 170 | "shape": 3 171 | } 172 | ], 173 | "properties": { 174 | "Node name for S&R": "SamplerCustomAdvanced" 175 | } 176 | }, 177 | { 178 | "id": 16, 179 | "type": "KSamplerSelect", 180 | "pos": [ 181 | 464, 182 | 796 183 | ], 184 | "size": { 185 | "0": 315, 186 | "1": 58 187 | }, 188 | "flags": {}, 189 | "order": 0, 190 | "mode": 0, 191 | "outputs": [ 192 | { 193 | "name": "SAMPLER", 194 | "type": "SAMPLER", 195 | "links": [ 196 | 19 197 | ], 198 | "shape": 3 199 | } 200 | ], 201 | "properties": { 202 | "Node name for S&R": "KSamplerSelect" 203 | }, 204 | "widgets_values": [ 205 | "dpmpp_2m" 206 | ] 207 | }, 208 | { 209 | "id": 17, 210 | "type": "BasicScheduler", 211 | "pos": [ 212 | 461, 213 | 914 214 | ], 215 | "size": { 216 | "0": 315, 217 | "1": 106 218 | }, 219 | "flags": {}, 220 | "order": 4, 221 | "mode": 0, 222 | "inputs": [ 223 | { 224 | "name": "model", 225 | "type": "MODEL", 226 | "link": 60, 227 | "slot_index": 0 228 | } 229 | ], 230 | "outputs": [ 231 | { 232 | "name": "SIGMAS", 233 | "type": "SIGMAS", 234 | "links": [ 235 | 20 236 | ], 237 | "shape": 3 238 | } 239 | ], 240 | "properties": { 241 | "Node name for S&R": "BasicScheduler" 242 | }, 243 | "widgets_values": [ 244 | "simple", 245 | 4, 246 | 1 247 | ] 248 | }, 249 | { 250 | "id": 22, 251 | "type": "BasicGuider", 252 | "pos": [ 253 | 463, 254 | 96 255 | ], 256 | "size": { 257 | "0": 241.79998779296875, 258 | "1": 46 259 | }, 260 | "flags": {}, 261 | "order": 6, 262 | "mode": 0, 263 | "inputs": [ 264 | { 265 | "name": "model", 266 | "type": "MODEL", 267 | "link": 59, 268 | "slot_index": 0 269 | }, 270 | { 271 | "name": "conditioning", 272 | "type": "CONDITIONING", 273 | "link": 40, 274 | "slot_index": 1 275 | } 276 | ], 277 | "outputs": [ 278 | { 279 | "name": "GUIDER", 280 | "type": "GUIDER", 281 | "links": [ 282 | 52 283 | ], 284 | "slot_index": 0, 285 | "shape": 3 286 | } 287 | ], 288 | "properties": { 289 | "Node name for S&R": "BasicGuider" 290 | } 291 | }, 292 | { 293 | "id": 25, 294 | "type": "RandomNoise", 295 | "pos": [ 296 | 464, 297 | 649 298 | ], 299 | "size": { 300 | "0": 315, 301 | "1": 82 302 | }, 303 | "flags": {}, 304 | "order": 1, 305 | "mode": 0, 306 | "outputs": [ 307 | { 308 | "name": "NOISE", 309 | "type": "NOISE", 310 | "links": [ 311 | 37 312 | ], 313 | "shape": 3 314 | } 315 | ], 316 | "properties": { 317 | "Node name for S&R": "RandomNoise" 318 | }, 319 | "widgets_values": [ 320 | 108847103651227, 321 | "increment" 322 | ] 323 | }, 324 | { 325 | "id": 29, 326 | "type": "EmptySD3LatentImage", 327 | "pos": [ 328 | 470, 329 | 481 330 | ], 331 | "size": { 332 | "0": 304.2161865234375, 333 | "1": 106 334 | }, 335 | "flags": {}, 336 | "order": 2, 337 | "mode": 0, 338 | "outputs": [ 339 | { 340 | "name": "LATENT", 341 | "type": "LATENT", 342 | "links": [ 343 | 54 344 | ], 345 | "slot_index": 0, 346 | "shape": 3 347 | } 348 | ], 349 | "properties": { 350 | "Node name for S&R": "EmptySD3LatentImage" 351 | }, 352 | "widgets_values": [ 353 | 1024, 354 | 1024, 355 | 1 356 | ] 357 | }, 358 | { 359 | "id": 31, 360 | "type": "CheckpointLoaderNF4", 361 | "pos": [ 362 | -77, 363 | 549 364 | ], 365 | "size": { 366 | "0": 373.1930847167969, 367 | "1": 121.38424682617188 368 | }, 369 | "flags": {}, 370 | "order": 3, 371 | "mode": 0, 372 | "outputs": [ 373 | { 374 | "name": "MODEL", 375 | "type": "MODEL", 376 | "links": [ 377 | 59, 378 | 60 379 | ], 380 | "slot_index": 
0, 381 | "shape": 3 382 | }, 383 | { 384 | "name": "CLIP", 385 | "type": "CLIP", 386 | "links": [ 387 | 72 388 | ], 389 | "slot_index": 1, 390 | "shape": 3 391 | }, 392 | { 393 | "name": "VAE", 394 | "type": "VAE", 395 | "links": [ 396 | 73 397 | ], 398 | "slot_index": 2, 399 | "shape": 3 400 | } 401 | ], 402 | "properties": { 403 | "Node name for S&R": "CheckpointLoaderNF4" 404 | }, 405 | "widgets_values": [ 406 | "flux1-schnell-bnb-nf4.safetensors" 407 | ] 408 | } 409 | ], 410 | "links": [ 411 | [ 412 | 9, 413 | 8, 414 | 0, 415 | 9, 416 | 0, 417 | "IMAGE" 418 | ], 419 | [ 420 | 19, 421 | 16, 422 | 0, 423 | 13, 424 | 2, 425 | "SAMPLER" 426 | ], 427 | [ 428 | 20, 429 | 17, 430 | 0, 431 | 13, 432 | 3, 433 | "SIGMAS" 434 | ], 435 | [ 436 | 37, 437 | 25, 438 | 0, 439 | 13, 440 | 0, 441 | "NOISE" 442 | ], 443 | [ 444 | 40, 445 | 6, 446 | 0, 447 | 22, 448 | 1, 449 | "CONDITIONING" 450 | ], 451 | [ 452 | 52, 453 | 22, 454 | 0, 455 | 13, 456 | 1, 457 | "GUIDER" 458 | ], 459 | [ 460 | 54, 461 | 29, 462 | 0, 463 | 13, 464 | 4, 465 | "LATENT" 466 | ], 467 | [ 468 | 55, 469 | 13, 470 | 0, 471 | 8, 472 | 0, 473 | "LATENT" 474 | ], 475 | [ 476 | 59, 477 | 31, 478 | 0, 479 | 22, 480 | 0, 481 | "MODEL" 482 | ], 483 | [ 484 | 60, 485 | 31, 486 | 0, 487 | 17, 488 | 0, 489 | "MODEL" 490 | ], 491 | [ 492 | 72, 493 | 31, 494 | 1, 495 | 6, 496 | 0, 497 | "CLIP" 498 | ], 499 | [ 500 | 73, 501 | 31, 502 | 2, 503 | 8, 504 | 1, 505 | "VAE" 506 | ] 507 | ], 508 | "groups": [], 509 | "config": {}, 510 | "extra": { 511 | "ds": { 512 | "scale": 0.8264462809917354, 513 | "offset": [ 514 | 316.3601737167255, 515 | 43.95594726718212 516 | ] 517 | } 518 | }, 519 | "version": 0.4 520 | } -------------------------------------------------------------------------------- /flux1-schnell-gguf-Q4-workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 34, 3 | "last_link_id": 47, 4 | "nodes": [ 5 | { 6 | "id": 5, 7 | "type": "EmptyLatentImage", 8 | "pos": [ 9 | 480, 10 | 432 11 | ], 12 | "size": { 13 | "0": 315, 14 | "1": 106 15 | }, 16 | "flags": {}, 17 | "order": 0, 18 | "mode": 0, 19 | "outputs": [ 20 | { 21 | "name": "LATENT", 22 | "type": "LATENT", 23 | "links": [ 24 | 45 25 | ], 26 | "slot_index": 0 27 | } 28 | ], 29 | "properties": { 30 | "Node name for S&R": "EmptyLatentImage" 31 | }, 32 | "widgets_values": [ 33 | 1024, 34 | 1024, 35 | 1 36 | ], 37 | "color": "#323", 38 | "bgcolor": "#535" 39 | }, 40 | { 41 | "id": 6, 42 | "type": "CLIPTextEncode", 43 | "pos": [ 44 | 379, 45 | 208 46 | ], 47 | "size": { 48 | "0": 422.84503173828125, 49 | "1": 164.31304931640625 50 | }, 51 | "flags": {}, 52 | "order": 5, 53 | "mode": 0, 54 | "inputs": [ 55 | { 56 | "name": "clip", 57 | "type": "CLIP", 58 | "link": 10 59 | } 60 | ], 61 | "outputs": [ 62 | { 63 | "name": "CONDITIONING", 64 | "type": "CONDITIONING", 65 | "links": [ 66 | 43 67 | ], 68 | "slot_index": 0 69 | } 70 | ], 71 | "properties": { 72 | "Node name for S&R": "CLIPTextEncode" 73 | }, 74 | "widgets_values": [ 75 | "a cat wearing a yellow hat, hold a sign saying \"tech-practice\"\n\n\n" 76 | ], 77 | "color": "#232", 78 | "bgcolor": "#353" 79 | }, 80 | { 81 | "id": 8, 82 | "type": "VAEDecode", 83 | "pos": [ 84 | 1248, 85 | 192 86 | ], 87 | "size": { 88 | "0": 210, 89 | "1": 46 90 | }, 91 | "flags": {}, 92 | "order": 8, 93 | "mode": 0, 94 | "inputs": [ 95 | { 96 | "name": "samples", 97 | "type": "LATENT", 98 | "link": 44 99 | }, 100 | { 101 | "name": "vae", 102 | "type": "VAE", 103 | "link": 12 104 | } 105 | ], 
106 | "outputs": [ 107 | { 108 | "name": "IMAGE", 109 | "type": "IMAGE", 110 | "links": [ 111 | 9 112 | ], 113 | "slot_index": 0 114 | } 115 | ], 116 | "properties": { 117 | "Node name for S&R": "VAEDecode" 118 | } 119 | }, 120 | { 121 | "id": 9, 122 | "type": "SaveImage", 123 | "pos": [ 124 | 1354, 125 | 336 126 | ], 127 | "size": { 128 | "0": 534.0767822265625, 129 | "1": 607.572265625 130 | }, 131 | "flags": {}, 132 | "order": 9, 133 | "mode": 0, 134 | "inputs": [ 135 | { 136 | "name": "images", 137 | "type": "IMAGE", 138 | "link": 9 139 | } 140 | ], 141 | "properties": {}, 142 | "widgets_values": [ 143 | "ComfyUI" 144 | ] 145 | }, 146 | { 147 | "id": 10, 148 | "type": "VAELoader", 149 | "pos": [ 150 | 38, 151 | 363 152 | ], 153 | "size": { 154 | "0": 315, 155 | "1": 58 156 | }, 157 | "flags": {}, 158 | "order": 1, 159 | "mode": 0, 160 | "outputs": [ 161 | { 162 | "name": "VAE", 163 | "type": "VAE", 164 | "links": [ 165 | 12 166 | ], 167 | "slot_index": 0, 168 | "shape": 3 169 | } 170 | ], 171 | "properties": { 172 | "Node name for S&R": "VAELoader" 173 | }, 174 | "widgets_values": [ 175 | "ae.safetensors" 176 | ] 177 | }, 178 | { 179 | "id": 11, 180 | "type": "DualCLIPLoader", 181 | "pos": [ 182 | 34, 183 | 135 184 | ], 185 | "size": { 186 | "0": 315, 187 | "1": 106 188 | }, 189 | "flags": {}, 190 | "order": 2, 191 | "mode": 0, 192 | "outputs": [ 193 | { 194 | "name": "CLIP", 195 | "type": "CLIP", 196 | "links": [ 197 | 10, 198 | 47 199 | ], 200 | "slot_index": 0, 201 | "shape": 3 202 | } 203 | ], 204 | "properties": { 205 | "Node name for S&R": "DualCLIPLoader" 206 | }, 207 | "widgets_values": [ 208 | "t5xxl_fp8_e4m3fn.safetensors", 209 | "clip_l.safetensors", 210 | "flux" 211 | ] 212 | }, 213 | { 214 | "id": 26, 215 | "type": "Note", 216 | "pos": [ 217 | 48, 218 | 480 219 | ], 220 | "size": { 221 | "0": 336, 222 | "1": 288 223 | }, 224 | "flags": {}, 225 | "order": 3, 226 | "mode": 0, 227 | "properties": { 228 | "text": "" 229 | }, 230 | "widgets_values": [ 231 | "If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-schnell.safetensors goes in: ComfyUI/models/unet/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/clip/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues." 
232 | ], 233 | "color": "#432", 234 | "bgcolor": "#653" 235 | }, 236 | { 237 | "id": 28, 238 | "type": "KSampler", 239 | "pos": [ 240 | 893, 241 | 368 242 | ], 243 | "size": { 244 | "0": 388.91705322265625, 245 | "1": 510.07452392578125 246 | }, 247 | "flags": {}, 248 | "order": 7, 249 | "mode": 0, 250 | "inputs": [ 251 | { 252 | "name": "model", 253 | "type": "MODEL", 254 | "link": 42 255 | }, 256 | { 257 | "name": "positive", 258 | "type": "CONDITIONING", 259 | "link": 43 260 | }, 261 | { 262 | "name": "negative", 263 | "type": "CONDITIONING", 264 | "link": 46 265 | }, 266 | { 267 | "name": "latent_image", 268 | "type": "LATENT", 269 | "link": 45 270 | } 271 | ], 272 | "outputs": [ 273 | { 274 | "name": "LATENT", 275 | "type": "LATENT", 276 | "links": [ 277 | 44 278 | ], 279 | "slot_index": 0, 280 | "shape": 3 281 | } 282 | ], 283 | "properties": { 284 | "Node name for S&R": "KSampler" 285 | }, 286 | "widgets_values": [ 287 | 17, 288 | "increment", 289 | 4, 290 | 1, 291 | "euler", 292 | "normal", 293 | 1 294 | ] 295 | }, 296 | { 297 | "id": 33, 298 | "type": "UnetLoaderGGUF", 299 | "pos": [ 300 | 427, 301 | 48 302 | ], 303 | "size": { 304 | "0": 315, 305 | "1": 58 306 | }, 307 | "flags": {}, 308 | "order": 4, 309 | "mode": 0, 310 | "outputs": [ 311 | { 312 | "name": "MODEL", 313 | "type": "MODEL", 314 | "links": [ 315 | 42 316 | ], 317 | "slot_index": 0, 318 | "shape": 3 319 | } 320 | ], 321 | "properties": { 322 | "Node name for S&R": "UnetLoaderGGUF" 323 | }, 324 | "widgets_values": [ 325 | "flux1-schnell-Q4_0.gguf" 326 | ] 327 | }, 328 | { 329 | "id": 34, 330 | "type": "CLIPTextEncode", 331 | "pos": [ 332 | 512, 333 | 596 334 | ], 335 | "size": { 336 | "0": 422.84503173828125, 337 | "1": 164.31304931640625 338 | }, 339 | "flags": { 340 | "collapsed": true 341 | }, 342 | "order": 6, 343 | "mode": 0, 344 | "inputs": [ 345 | { 346 | "name": "clip", 347 | "type": "CLIP", 348 | "link": 47, 349 | "slot_index": 0 350 | } 351 | ], 352 | "outputs": [ 353 | { 354 | "name": "CONDITIONING", 355 | "type": "CONDITIONING", 356 | "links": [ 357 | 46 358 | ], 359 | "slot_index": 0 360 | } 361 | ], 362 | "title": "CLIP Text Encode (Negative Prompt)", 363 | "properties": { 364 | "Node name for S&R": "CLIPTextEncode" 365 | }, 366 | "widgets_values": [ 367 | "" 368 | ], 369 | "color": "#322", 370 | "bgcolor": "#533" 371 | } 372 | ], 373 | "links": [ 374 | [ 375 | 9, 376 | 8, 377 | 0, 378 | 9, 379 | 0, 380 | "IMAGE" 381 | ], 382 | [ 383 | 10, 384 | 11, 385 | 0, 386 | 6, 387 | 0, 388 | "CLIP" 389 | ], 390 | [ 391 | 12, 392 | 10, 393 | 0, 394 | 8, 395 | 1, 396 | "VAE" 397 | ], 398 | [ 399 | 42, 400 | 33, 401 | 0, 402 | 28, 403 | 0, 404 | "MODEL" 405 | ], 406 | [ 407 | 43, 408 | 6, 409 | 0, 410 | 28, 411 | 1, 412 | "CONDITIONING" 413 | ], 414 | [ 415 | 44, 416 | 28, 417 | 0, 418 | 8, 419 | 0, 420 | "LATENT" 421 | ], 422 | [ 423 | 45, 424 | 5, 425 | 0, 426 | 28, 427 | 3, 428 | "LATENT" 429 | ], 430 | [ 431 | 46, 432 | 34, 433 | 0, 434 | 28, 435 | 2, 436 | "CONDITIONING" 437 | ], 438 | [ 439 | 47, 440 | 11, 441 | 0, 442 | 34, 443 | 0, 444 | "CLIP" 445 | ] 446 | ], 447 | "groups": [], 448 | "config": {}, 449 | "extra": { 450 | "ds": { 451 | "scale": 0.8264462809917358, 452 | "offset": [ 453 | 85.93380228245525, 454 | 172.31796164256676 455 | ] 456 | } 457 | }, 458 | "version": 0.4 459 | } -------------------------------------------------------------------------------- /flux_dev_checkpoint_example.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ttio2tech/ComfyUI_workflows_collection/7ba66522ad516317a435b2fd6bacc9a256dcf7a3/flux_dev_checkpoint_example.png -------------------------------------------------------------------------------- /flux_schnell_checkpoint_fp8.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 36, 3 | "last_link_id": 57, 4 | "nodes": [ 5 | { 6 | "id": 6, 7 | "type": "CLIPTextEncode", 8 | "pos": [ 9 | 384, 10 | 192 11 | ], 12 | "size": { 13 | "0": 422.84503173828125, 14 | "1": 164.31304931640625 15 | }, 16 | "flags": {}, 17 | "order": 3, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "clip", 22 | "type": "CLIP", 23 | "link": 45 24 | } 25 | ], 26 | "outputs": [ 27 | { 28 | "name": "CONDITIONING", 29 | "type": "CONDITIONING", 30 | "links": [ 31 | 56 32 | ], 33 | "slot_index": 0 34 | } 35 | ], 36 | "title": "CLIP Text Encode (Positive Prompt)", 37 | "properties": { 38 | "Node name for S&R": "CLIPTextEncode" 39 | }, 40 | "widgets_values": [ 41 | "dog wears a brown hat, hold a sign saying \"Tech-Practice\"" 42 | ], 43 | "color": "#232", 44 | "bgcolor": "#353" 45 | }, 46 | { 47 | "id": 8, 48 | "type": "VAEDecode", 49 | "pos": [ 50 | 1151, 51 | 195 52 | ], 53 | "size": { 54 | "0": 210, 55 | "1": 46 56 | }, 57 | "flags": {}, 58 | "order": 7, 59 | "mode": 0, 60 | "inputs": [ 61 | { 62 | "name": "samples", 63 | "type": "LATENT", 64 | "link": 52 65 | }, 66 | { 67 | "name": "vae", 68 | "type": "VAE", 69 | "link": 46 70 | } 71 | ], 72 | "outputs": [ 73 | { 74 | "name": "IMAGE", 75 | "type": "IMAGE", 76 | "links": [ 77 | 9 78 | ], 79 | "slot_index": 0 80 | } 81 | ], 82 | "properties": { 83 | "Node name for S&R": "VAEDecode" 84 | } 85 | }, 86 | { 87 | "id": 9, 88 | "type": "SaveImage", 89 | "pos": [ 90 | 1157, 91 | 307 92 | ], 93 | "size": [ 94 | 789.9044582750616, 95 | 546.781838304472 96 | ], 97 | "flags": {}, 98 | "order": 8, 99 | "mode": 0, 100 | "inputs": [ 101 | { 102 | "name": "images", 103 | "type": "IMAGE", 104 | "link": 9 105 | } 106 | ], 107 | "properties": {}, 108 | "widgets_values": [ 109 | "ComfyUI" 110 | ] 111 | }, 112 | { 113 | "id": 27, 114 | "type": "EmptySD3LatentImage", 115 | "pos": [ 116 | 471, 117 | 455 118 | ], 119 | "size": { 120 | "0": 315, 121 | "1": 106 122 | }, 123 | "flags": {}, 124 | "order": 0, 125 | "mode": 0, 126 | "outputs": [ 127 | { 128 | "name": "LATENT", 129 | "type": "LATENT", 130 | "links": [ 131 | 51 132 | ], 133 | "slot_index": 0, 134 | "shape": 3 135 | } 136 | ], 137 | "properties": { 138 | "Node name for S&R": "EmptySD3LatentImage" 139 | }, 140 | "widgets_values": [ 141 | 1024, 142 | 1024, 143 | 1 144 | ], 145 | "color": "#323", 146 | "bgcolor": "#535" 147 | }, 148 | { 149 | "id": 30, 150 | "type": "CheckpointLoaderSimple", 151 | "pos": [ 152 | 48, 153 | 192 154 | ], 155 | "size": { 156 | "0": 315, 157 | "1": 98 158 | }, 159 | "flags": {}, 160 | "order": 2, 161 | "mode": 0, 162 | "outputs": [ 163 | { 164 | "name": "MODEL", 165 | "type": "MODEL", 166 | "links": [ 167 | 47 168 | ], 169 | "slot_index": 0, 170 | "shape": 3 171 | }, 172 | { 173 | "name": "CLIP", 174 | "type": "CLIP", 175 | "links": [ 176 | 45, 177 | 54 178 | ], 179 | "slot_index": 1, 180 | "shape": 3 181 | }, 182 | { 183 | "name": "VAE", 184 | "type": "VAE", 185 | "links": [ 186 | 46 187 | ], 188 | "slot_index": 2, 189 | "shape": 3 190 | } 191 | ], 192 | "properties": { 193 | "Node name for S&R": "CheckpointLoaderSimple" 194 | }, 195 | "widgets_values": [ 196 | "flux1-schnell-fp8.safetensors" 197 | ] 198 | }, 199 | { 200 
| "id": 31, 201 | "type": "KSampler", 202 | "pos": [ 203 | 816, 204 | 192 205 | ], 206 | "size": { 207 | "0": 315, 208 | "1": 262 209 | }, 210 | "flags": {}, 211 | "order": 6, 212 | "mode": 0, 213 | "inputs": [ 214 | { 215 | "name": "model", 216 | "type": "MODEL", 217 | "link": 47 218 | }, 219 | { 220 | "name": "positive", 221 | "type": "CONDITIONING", 222 | "link": 57 223 | }, 224 | { 225 | "name": "negative", 226 | "type": "CONDITIONING", 227 | "link": 55 228 | }, 229 | { 230 | "name": "latent_image", 231 | "type": "LATENT", 232 | "link": 51 233 | } 234 | ], 235 | "outputs": [ 236 | { 237 | "name": "LATENT", 238 | "type": "LATENT", 239 | "links": [ 240 | 52 241 | ], 242 | "slot_index": 0, 243 | "shape": 3 244 | } 245 | ], 246 | "properties": { 247 | "Node name for S&R": "KSampler" 248 | }, 249 | "widgets_values": [ 250 | 972054013131370, 251 | "increment", 252 | 4, 253 | 1, 254 | "euler", 255 | "simple", 256 | 1 257 | ] 258 | }, 259 | { 260 | "id": 33, 261 | "type": "CLIPTextEncode", 262 | "pos": [ 263 | 390, 264 | 400 265 | ], 266 | "size": { 267 | "0": 422.84503173828125, 268 | "1": 164.31304931640625 269 | }, 270 | "flags": { 271 | "collapsed": true 272 | }, 273 | "order": 4, 274 | "mode": 0, 275 | "inputs": [ 276 | { 277 | "name": "clip", 278 | "type": "CLIP", 279 | "link": 54, 280 | "slot_index": 0 281 | } 282 | ], 283 | "outputs": [ 284 | { 285 | "name": "CONDITIONING", 286 | "type": "CONDITIONING", 287 | "links": [ 288 | 55 289 | ], 290 | "slot_index": 0 291 | } 292 | ], 293 | "title": "CLIP Text Encode (Negative Prompt)", 294 | "properties": { 295 | "Node name for S&R": "CLIPTextEncode" 296 | }, 297 | "widgets_values": [ 298 | "" 299 | ], 300 | "color": "#322", 301 | "bgcolor": "#533" 302 | }, 303 | { 304 | "id": 34, 305 | "type": "Note", 306 | "pos": [ 307 | 831, 308 | 501 309 | ], 310 | "size": { 311 | "0": 282.8617858886719, 312 | "1": 164.08004760742188 313 | }, 314 | "flags": {}, 315 | "order": 1, 316 | "mode": 0, 317 | "properties": { 318 | "text": "" 319 | }, 320 | "widgets_values": [ 321 | "Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored." 
322 | ], 323 | "color": "#432", 324 | "bgcolor": "#653" 325 | }, 326 | { 327 | "id": 35, 328 | "type": "FluxGuidance", 329 | "pos": [ 330 | 576, 331 | 96 332 | ], 333 | "size": { 334 | "0": 211.60000610351562, 335 | "1": 58 336 | }, 337 | "flags": {}, 338 | "order": 5, 339 | "mode": 0, 340 | "inputs": [ 341 | { 342 | "name": "conditioning", 343 | "type": "CONDITIONING", 344 | "link": 56 345 | } 346 | ], 347 | "outputs": [ 348 | { 349 | "name": "CONDITIONING", 350 | "type": "CONDITIONING", 351 | "links": [ 352 | 57 353 | ], 354 | "slot_index": 0, 355 | "shape": 3 356 | } 357 | ], 358 | "properties": { 359 | "Node name for S&R": "FluxGuidance" 360 | }, 361 | "widgets_values": [ 362 | 3.5 363 | ] 364 | } 365 | ], 366 | "links": [ 367 | [ 368 | 9, 369 | 8, 370 | 0, 371 | 9, 372 | 0, 373 | "IMAGE" 374 | ], 375 | [ 376 | 45, 377 | 30, 378 | 1, 379 | 6, 380 | 0, 381 | "CLIP" 382 | ], 383 | [ 384 | 46, 385 | 30, 386 | 2, 387 | 8, 388 | 1, 389 | "VAE" 390 | ], 391 | [ 392 | 47, 393 | 30, 394 | 0, 395 | 31, 396 | 0, 397 | "MODEL" 398 | ], 399 | [ 400 | 51, 401 | 27, 402 | 0, 403 | 31, 404 | 3, 405 | "LATENT" 406 | ], 407 | [ 408 | 52, 409 | 31, 410 | 0, 411 | 8, 412 | 0, 413 | "LATENT" 414 | ], 415 | [ 416 | 54, 417 | 30, 418 | 1, 419 | 33, 420 | 0, 421 | "CLIP" 422 | ], 423 | [ 424 | 55, 425 | 33, 426 | 0, 427 | 31, 428 | 2, 429 | "CONDITIONING" 430 | ], 431 | [ 432 | 56, 433 | 6, 434 | 0, 435 | 35, 436 | 0, 437 | "CONDITIONING" 438 | ], 439 | [ 440 | 57, 441 | 35, 442 | 0, 443 | 31, 444 | 1, 445 | "CONDITIONING" 446 | ] 447 | ], 448 | "groups": [], 449 | "config": {}, 450 | "extra": { 451 | "ds": { 452 | "scale": 0.8264462809917354, 453 | "offset": [ 454 | 145.72708909683837, 455 | 91.63638322445368 456 | ] 457 | } 458 | }, 459 | "version": 0.4 460 | } -------------------------------------------------------------------------------- /gguf_macbook_tech_practice_workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 34, 3 | "last_link_id": 47, 4 | "nodes": [ 5 | { 6 | "id": 5, 7 | "type": "EmptyLatentImage", 8 | "pos": [ 9 | 480, 10 | 432 11 | ], 12 | "size": { 13 | "0": 315, 14 | "1": 106 15 | }, 16 | "flags": {}, 17 | "order": 0, 18 | "mode": 0, 19 | "outputs": [ 20 | { 21 | "name": "LATENT", 22 | "type": "LATENT", 23 | "links": [ 24 | 45 25 | ], 26 | "slot_index": 0 27 | } 28 | ], 29 | "properties": { 30 | "Node name for S&R": "EmptyLatentImage" 31 | }, 32 | "widgets_values": [ 33 | 1024, 34 | 1024, 35 | 1 36 | ], 37 | "color": "#323", 38 | "bgcolor": "#535" 39 | }, 40 | { 41 | "id": 6, 42 | "type": "CLIPTextEncode", 43 | "pos": [ 44 | 379, 45 | 208 46 | ], 47 | "size": { 48 | "0": 422.84503173828125, 49 | "1": 164.31304931640625 50 | }, 51 | "flags": {}, 52 | "order": 5, 53 | "mode": 0, 54 | "inputs": [ 55 | { 56 | "name": "clip", 57 | "type": "CLIP", 58 | "link": 10 59 | } 60 | ], 61 | "outputs": [ 62 | { 63 | "name": "CONDITIONING", 64 | "type": "CONDITIONING", 65 | "links": [ 66 | 43 67 | ], 68 | "slot_index": 0 69 | } 70 | ], 71 | "properties": { 72 | "Node name for S&R": "CLIPTextEncode" 73 | }, 74 | "widgets_values": [ 75 | "a cat wearing a yellow hat, hold a sign saying \"tech-practice\"\n\n\n" 76 | ], 77 | "color": "#232", 78 | "bgcolor": "#353" 79 | }, 80 | { 81 | "id": 8, 82 | "type": "VAEDecode", 83 | "pos": [ 84 | 1248, 85 | 192 86 | ], 87 | "size": { 88 | "0": 210, 89 | "1": 46 90 | }, 91 | "flags": {}, 92 | "order": 8, 93 | "mode": 0, 94 | "inputs": [ 95 | { 96 | "name": "samples", 97 | "type": "LATENT", 98 | "link": 
44 99 | }, 100 | { 101 | "name": "vae", 102 | "type": "VAE", 103 | "link": 12 104 | } 105 | ], 106 | "outputs": [ 107 | { 108 | "name": "IMAGE", 109 | "type": "IMAGE", 110 | "links": [ 111 | 9 112 | ], 113 | "slot_index": 0 114 | } 115 | ], 116 | "properties": { 117 | "Node name for S&R": "VAEDecode" 118 | } 119 | }, 120 | { 121 | "id": 9, 122 | "type": "SaveImage", 123 | "pos": [ 124 | 1354, 125 | 336 126 | ], 127 | "size": { 128 | "0": 534.0767822265625, 129 | "1": 607.572265625 130 | }, 131 | "flags": {}, 132 | "order": 9, 133 | "mode": 0, 134 | "inputs": [ 135 | { 136 | "name": "images", 137 | "type": "IMAGE", 138 | "link": 9 139 | } 140 | ], 141 | "properties": {}, 142 | "widgets_values": [ 143 | "ComfyUI" 144 | ] 145 | }, 146 | { 147 | "id": 10, 148 | "type": "VAELoader", 149 | "pos": [ 150 | 38, 151 | 363 152 | ], 153 | "size": { 154 | "0": 315, 155 | "1": 58 156 | }, 157 | "flags": {}, 158 | "order": 4, 159 | "mode": 0, 160 | "outputs": [ 161 | { 162 | "name": "VAE", 163 | "type": "VAE", 164 | "links": [ 165 | 12 166 | ], 167 | "slot_index": 0, 168 | "shape": 3 169 | } 170 | ], 171 | "properties": { 172 | "Node name for S&R": "VAELoader" 173 | }, 174 | "widgets_values": [ 175 | "ae.safetensors" 176 | ] 177 | }, 178 | { 179 | "id": 11, 180 | "type": "DualCLIPLoader", 181 | "pos": [ 182 | 34, 183 | 135 184 | ], 185 | "size": { 186 | "0": 315, 187 | "1": 106 188 | }, 189 | "flags": {}, 190 | "order": 3, 191 | "mode": 0, 192 | "outputs": [ 193 | { 194 | "name": "CLIP", 195 | "type": "CLIP", 196 | "links": [ 197 | 10, 198 | 47 199 | ], 200 | "slot_index": 0, 201 | "shape": 3 202 | } 203 | ], 204 | "properties": { 205 | "Node name for S&R": "DualCLIPLoader" 206 | }, 207 | "widgets_values": [ 208 | "t5xxl_fp8_e4m3fn.safetensors", 209 | "clip_l.safetensors", 210 | "flux" 211 | ] 212 | }, 213 | { 214 | "id": 26, 215 | "type": "Note", 216 | "pos": [ 217 | 48, 218 | 480 219 | ], 220 | "size": { 221 | "0": 336, 222 | "1": 288 223 | }, 224 | "flags": {}, 225 | "order": 1, 226 | "mode": 0, 227 | "properties": { 228 | "text": "" 229 | }, 230 | "widgets_values": [ 231 | "If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-schnell.safetensors goes in: ComfyUI/models/unet/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/clip/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues." 
232 | ], 233 | "color": "#432", 234 | "bgcolor": "#653" 235 | }, 236 | { 237 | "id": 28, 238 | "type": "KSampler", 239 | "pos": [ 240 | 893, 241 | 368 242 | ], 243 | "size": { 244 | "0": 388.91705322265625, 245 | "1": 510.07452392578125 246 | }, 247 | "flags": {}, 248 | "order": 7, 249 | "mode": 0, 250 | "inputs": [ 251 | { 252 | "name": "model", 253 | "type": "MODEL", 254 | "link": 42 255 | }, 256 | { 257 | "name": "positive", 258 | "type": "CONDITIONING", 259 | "link": 43 260 | }, 261 | { 262 | "name": "negative", 263 | "type": "CONDITIONING", 264 | "link": 46 265 | }, 266 | { 267 | "name": "latent_image", 268 | "type": "LATENT", 269 | "link": 45 270 | } 271 | ], 272 | "outputs": [ 273 | { 274 | "name": "LATENT", 275 | "type": "LATENT", 276 | "links": [ 277 | 44 278 | ], 279 | "slot_index": 0, 280 | "shape": 3 281 | } 282 | ], 283 | "properties": { 284 | "Node name for S&R": "KSampler" 285 | }, 286 | "widgets_values": [ 287 | 17, 288 | "increment", 289 | 4, 290 | 1, 291 | "euler", 292 | "normal", 293 | 1 294 | ] 295 | }, 296 | { 297 | "id": 33, 298 | "type": "UnetLoaderGGUF", 299 | "pos": [ 300 | 427, 301 | 48 302 | ], 303 | "size": { 304 | "0": 315, 305 | "1": 58 306 | }, 307 | "flags": {}, 308 | "order": 2, 309 | "mode": 0, 310 | "outputs": [ 311 | { 312 | "name": "MODEL", 313 | "type": "MODEL", 314 | "links": [ 315 | 42 316 | ], 317 | "slot_index": 0, 318 | "shape": 3 319 | } 320 | ], 321 | "properties": { 322 | "Node name for S&R": "UnetLoaderGGUF" 323 | }, 324 | "widgets_values": [ 325 | "flux1-schnell-Q4_0.gguf" 326 | ] 327 | }, 328 | { 329 | "id": 34, 330 | "type": "CLIPTextEncode", 331 | "pos": [ 332 | 512, 333 | 596 334 | ], 335 | "size": { 336 | "0": 422.84503173828125, 337 | "1": 164.31304931640625 338 | }, 339 | "flags": { 340 | "collapsed": true 341 | }, 342 | "order": 6, 343 | "mode": 0, 344 | "inputs": [ 345 | { 346 | "name": "clip", 347 | "type": "CLIP", 348 | "link": 47, 349 | "slot_index": 0 350 | } 351 | ], 352 | "outputs": [ 353 | { 354 | "name": "CONDITIONING", 355 | "type": "CONDITIONING", 356 | "links": [ 357 | 46 358 | ], 359 | "slot_index": 0 360 | } 361 | ], 362 | "title": "CLIP Text Encode (Negative Prompt)", 363 | "properties": { 364 | "Node name for S&R": "CLIPTextEncode" 365 | }, 366 | "widgets_values": [ 367 | "" 368 | ], 369 | "color": "#322", 370 | "bgcolor": "#533" 371 | } 372 | ], 373 | "links": [ 374 | [ 375 | 9, 376 | 8, 377 | 0, 378 | 9, 379 | 0, 380 | "IMAGE" 381 | ], 382 | [ 383 | 10, 384 | 11, 385 | 0, 386 | 6, 387 | 0, 388 | "CLIP" 389 | ], 390 | [ 391 | 12, 392 | 10, 393 | 0, 394 | 8, 395 | 1, 396 | "VAE" 397 | ], 398 | [ 399 | 42, 400 | 33, 401 | 0, 402 | 28, 403 | 0, 404 | "MODEL" 405 | ], 406 | [ 407 | 43, 408 | 6, 409 | 0, 410 | 28, 411 | 1, 412 | "CONDITIONING" 413 | ], 414 | [ 415 | 44, 416 | 28, 417 | 0, 418 | 8, 419 | 0, 420 | "LATENT" 421 | ], 422 | [ 423 | 45, 424 | 5, 425 | 0, 426 | 28, 427 | 3, 428 | "LATENT" 429 | ], 430 | [ 431 | 46, 432 | 34, 433 | 0, 434 | 28, 435 | 2, 436 | "CONDITIONING" 437 | ], 438 | [ 439 | 47, 440 | 11, 441 | 0, 442 | 34, 443 | 0, 444 | "CLIP" 445 | ] 446 | ], 447 | "groups": [], 448 | "config": {}, 449 | "extra": { 450 | "ds": { 451 | "scale": 0.6830134553650707, 452 | "offset": [ 453 | 172.52140228245526, 454 | 97.3584616425668 455 | ] 456 | } 457 | }, 458 | "version": 0.4 459 | } -------------------------------------------------------------------------------- /hunyuanvideo_t2v.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 41, 3 | 
"last_link_id": 50, 4 | "nodes": [ 5 | { 6 | "id": 3, 7 | "type": "HyVideoSampler", 8 | "pos": [ 9 | 266, 10 | -141 11 | ], 12 | "size": [ 13 | 315, 14 | 314 15 | ], 16 | "flags": {}, 17 | "order": 5, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "model", 22 | "type": "HYVIDEOMODEL", 23 | "link": 2 24 | }, 25 | { 26 | "name": "hyvid_embeds", 27 | "type": "HYVIDEMBEDS", 28 | "link": 36 29 | }, 30 | { 31 | "name": "samples", 32 | "type": "LATENT", 33 | "link": null, 34 | "shape": 7 35 | } 36 | ], 37 | "outputs": [ 38 | { 39 | "name": "samples", 40 | "type": "LATENT", 41 | "links": [ 42 | 4 43 | ], 44 | "slot_index": 0 45 | } 46 | ], 47 | "properties": { 48 | "Node name for S&R": "HyVideoSampler" 49 | }, 50 | "widgets_values": [ 51 | 512, 52 | 320, 53 | 85, 54 | 30, 55 | 6, 56 | 9, 57 | 6, 58 | "fixed", 59 | 1, 60 | 1 61 | ] 62 | }, 63 | { 64 | "id": 5, 65 | "type": "HyVideoDecode", 66 | "pos": [ 67 | 651, 68 | -285 69 | ], 70 | "size": [ 71 | 345.4285888671875, 72 | 102 73 | ], 74 | "flags": {}, 75 | "order": 6, 76 | "mode": 0, 77 | "inputs": [ 78 | { 79 | "name": "vae", 80 | "type": "VAE", 81 | "link": 6 82 | }, 83 | { 84 | "name": "samples", 85 | "type": "LATENT", 86 | "link": 4 87 | } 88 | ], 89 | "outputs": [ 90 | { 91 | "name": "images", 92 | "type": "IMAGE", 93 | "links": [ 94 | 42 95 | ], 96 | "slot_index": 0 97 | } 98 | ], 99 | "properties": { 100 | "Node name for S&R": "HyVideoDecode" 101 | }, 102 | "widgets_values": [ 103 | true, 104 | 8 105 | ] 106 | }, 107 | { 108 | "id": 1, 109 | "type": "HyVideoModelLoader", 110 | "pos": [ 111 | -285, 112 | -94 113 | ], 114 | "size": [ 115 | 426.1773986816406, 116 | 174 117 | ], 118 | "flags": {}, 119 | "order": 0, 120 | "mode": 0, 121 | "inputs": [ 122 | { 123 | "name": "compile_args", 124 | "type": "COMPILEARGS", 125 | "link": null, 126 | "shape": 7 127 | }, 128 | { 129 | "name": "block_swap_args", 130 | "type": "BLOCKSWAPARGS", 131 | "link": null, 132 | "shape": 7 133 | } 134 | ], 135 | "outputs": [ 136 | { 137 | "name": "model", 138 | "type": "HYVIDEOMODEL", 139 | "links": [ 140 | 2 141 | ], 142 | "slot_index": 0 143 | } 144 | ], 145 | "properties": { 146 | "Node name for S&R": "HyVideoModelLoader" 147 | }, 148 | "widgets_values": [ 149 | "hunyuan_video_720_cfgdistill_fp8_e4m3fn.safetensors", 150 | "bf16", 151 | "fp8_e4m3fn", 152 | "offload_device", 153 | "sageattn_varlen" 154 | ] 155 | }, 156 | { 157 | "id": 7, 158 | "type": "HyVideoVAELoader", 159 | "pos": [ 160 | -277, 161 | -284 162 | ], 163 | "size": [ 164 | 379.166748046875, 165 | 82 166 | ], 167 | "flags": {}, 168 | "order": 1, 169 | "mode": 0, 170 | "inputs": [ 171 | { 172 | "name": "compile_args", 173 | "type": "COMPILEARGS", 174 | "link": null, 175 | "shape": 7 176 | } 177 | ], 178 | "outputs": [ 179 | { 180 | "name": "vae", 181 | "type": "VAE", 182 | "links": [ 183 | 6 184 | ], 185 | "slot_index": 0 186 | } 187 | ], 188 | "properties": { 189 | "Node name for S&R": "HyVideoVAELoader" 190 | }, 191 | "widgets_values": [ 192 | "hunyuan_video_vae_fp32.safetensors", 193 | "fp16" 194 | ] 195 | }, 196 | { 197 | "id": 16, 198 | "type": "DownloadAndLoadHyVideoTextEncoder", 199 | "pos": [ 200 | -312, 201 | 243 202 | ], 203 | "size": [ 204 | 441, 205 | 154 206 | ], 207 | "flags": {}, 208 | "order": 2, 209 | "mode": 0, 210 | "inputs": [], 211 | "outputs": [ 212 | { 213 | "name": "hyvid_text_encoder", 214 | "type": "HYVIDTEXTENCODER", 215 | "links": [ 216 | 35 217 | ] 218 | } 219 | ], 220 | "properties": { 221 | "Node name for S&R": "DownloadAndLoadHyVideoTextEncoder" 222 | }, 223 | 
"widgets_values": [ 224 | "Kijai/llava-llama-3-8b-text-encoder-tokenizer", 225 | "openai/clip-vit-large-patch14", 226 | "fp16", 227 | false, 228 | 2 229 | ] 230 | }, 231 | { 232 | "id": 34, 233 | "type": "VHS_VideoCombine", 234 | "pos": [ 235 | 644.4561767578125, 236 | -119.98246765136719 237 | ], 238 | "size": [ 239 | 344.8273620605469, 240 | 527.01708984375 241 | ], 242 | "flags": {}, 243 | "order": 7, 244 | "mode": 0, 245 | "inputs": [ 246 | { 247 | "name": "images", 248 | "type": "IMAGE", 249 | "link": 42 250 | }, 251 | { 252 | "name": "audio", 253 | "type": "AUDIO", 254 | "link": null, 255 | "shape": 7 256 | }, 257 | { 258 | "name": "meta_batch", 259 | "type": "VHS_BatchManager", 260 | "link": null, 261 | "shape": 7 262 | }, 263 | { 264 | "name": "vae", 265 | "type": "VAE", 266 | "link": null, 267 | "shape": 7 268 | } 269 | ], 270 | "outputs": [ 271 | { 272 | "name": "Filenames", 273 | "type": "VHS_FILENAMES", 274 | "links": null 275 | } 276 | ], 277 | "properties": { 278 | "Node name for S&R": "VHS_VideoCombine" 279 | }, 280 | "widgets_values": { 281 | "frame_rate": 16, 282 | "loop_count": 0, 283 | "filename_prefix": "HunyuanVideo", 284 | "format": "video/h264-mp4", 285 | "pix_fmt": "yuv420p", 286 | "crf": 19, 287 | "save_metadata": true, 288 | "pingpong": false, 289 | "save_output": false, 290 | "videopreview": { 291 | "hidden": false, 292 | "paused": false, 293 | "params": { 294 | "filename": "HunyuanVideo_00002.mp4", 295 | "subfolder": "", 296 | "type": "temp", 297 | "format": "video/h264-mp4", 298 | "frame_rate": 16 299 | }, 300 | "muted": false 301 | } 302 | } 303 | }, 304 | { 305 | "id": 41, 306 | "type": "Note", 307 | "pos": [ 308 | -731.7229614257812, 309 | -266.8678894042969 310 | ], 311 | "size": [ 312 | 429.2561950683594, 313 | 238.14508056640625 314 | ], 315 | "flags": {}, 316 | "order": 3, 317 | "mode": 0, 318 | "inputs": [], 319 | "outputs": [], 320 | "properties": {}, 321 | "widgets_values": [ 322 | "For video tutorial, check out my youtube channel; \n\nhttps://www.youtube.com/@tech-practice9805\n\nNode location: https://github.com/kijai/ComfyUI-HunyuanVideoWrapper?tab=readme-ov-file" 323 | ], 324 | "color": "#432", 325 | "bgcolor": "#653" 326 | }, 327 | { 328 | "id": 30, 329 | "type": "HyVideoTextEncode", 330 | "pos": [ 331 | 179, 332 | 242 333 | ], 334 | "size": [ 335 | 408.91546630859375, 336 | 172.84060668945312 337 | ], 338 | "flags": {}, 339 | "order": 4, 340 | "mode": 0, 341 | "inputs": [ 342 | { 343 | "name": "text_encoders", 344 | "type": "HYVIDTEXTENCODER", 345 | "link": 35 346 | } 347 | ], 348 | "outputs": [ 349 | { 350 | "name": "hyvid_embeds", 351 | "type": "HYVIDEMBEDS", 352 | "links": [ 353 | 36 354 | ] 355 | } 356 | ], 357 | "properties": { 358 | "Node name for S&R": "HyVideoTextEncode" 359 | }, 360 | "widgets_values": [ 361 | "An AI-augmented fighter pilot navigates a gravitational combat zone where reality itself becomes a weapon", 362 | "bad quality video", 363 | true 364 | ] 365 | } 366 | ], 367 | "links": [ 368 | [ 369 | 2, 370 | 1, 371 | 0, 372 | 3, 373 | 0, 374 | "HYVIDEOMODEL" 375 | ], 376 | [ 377 | 4, 378 | 3, 379 | 0, 380 | 5, 381 | 1, 382 | "LATENT" 383 | ], 384 | [ 385 | 6, 386 | 7, 387 | 0, 388 | 5, 389 | 0, 390 | "VAE" 391 | ], 392 | [ 393 | 35, 394 | 16, 395 | 0, 396 | 30, 397 | 0, 398 | "HYVIDTEXTENCODER" 399 | ], 400 | [ 401 | 36, 402 | 30, 403 | 0, 404 | 3, 405 | 1, 406 | "HYVIDEMBEDS" 407 | ], 408 | [ 409 | 42, 410 | 5, 411 | 0, 412 | 34, 413 | 0, 414 | "IMAGE" 415 | ] 416 | ], 417 | "groups": [], 418 | "config": {}, 419 | "extra": { 
420 | "ds": { 421 | "scale": 0.7247295000000012, 422 | "offset": [ 423 | 1269.3196264944804, 424 | 465.09272734534943 425 | ] 426 | }, 427 | "workspace_info": { 428 | "id": "JzupXeKDJtB5bE-A18ROI" 429 | } 430 | }, 431 | "version": 0.4 432 | } -------------------------------------------------------------------------------- /wan2.1_Macbook_text2video_1.3B.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 53, 3 | "last_link_id": 98, 4 | "nodes": [ 5 | { 6 | "id": 7, 7 | "type": "CLIPTextEncode", 8 | "pos": [ 9 | 413, 10 | 389 11 | ], 12 | "size": [ 13 | 425.27801513671875, 14 | 180.6060791015625 15 | ], 16 | "flags": {}, 17 | "order": 7, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "clip", 22 | "type": "CLIP", 23 | "link": 75 24 | } 25 | ], 26 | "outputs": [ 27 | { 28 | "name": "CONDITIONING", 29 | "type": "CONDITIONING", 30 | "links": [ 31 | 52 32 | ], 33 | "slot_index": 0 34 | } 35 | ], 36 | "title": "CLIP Text Encode (Negative Prompt)", 37 | "properties": { 38 | "Node name for S&R": "CLIPTextEncode" 39 | }, 40 | "widgets_values": [ 41 | "色调艳丽,过曝,静态,细节模糊不清,字幕,风格,作品,画作,画面,静止,整体发灰,最差质量,低质量,JPEG压缩残留,丑陋的,残缺的,多余的手指,画得不好的手部,画得不好的脸部,畸形的,毁容的,形态畸形的肢体,手指融合,静止不动的画面,杂乱的背景,三条腿,背景人很多,倒着走" 42 | ], 43 | "color": "#322", 44 | "bgcolor": "#533" 45 | }, 46 | { 47 | "id": 48, 48 | "type": "ModelSamplingSD3", 49 | "pos": [ 50 | 440, 51 | 50 52 | ], 53 | "size": [ 54 | 210, 55 | 58 56 | ], 57 | "flags": {}, 58 | "order": 9, 59 | "mode": 0, 60 | "inputs": [ 61 | { 62 | "name": "model", 63 | "type": "MODEL", 64 | "link": 98 65 | } 66 | ], 67 | "outputs": [ 68 | { 69 | "name": "MODEL", 70 | "type": "MODEL", 71 | "links": [ 72 | 95 73 | ], 74 | "slot_index": 0 75 | } 76 | ], 77 | "properties": { 78 | "Node name for S&R": "ModelSamplingSD3" 79 | }, 80 | "widgets_values": [ 81 | 8 82 | ] 83 | }, 84 | { 85 | "id": 39, 86 | "type": "VAELoader", 87 | "pos": [ 88 | 866.3932495117188, 89 | 499.18597412109375 90 | ], 91 | "size": [ 92 | 306.36004638671875, 93 | 58 94 | ], 95 | "flags": {}, 96 | "order": 0, 97 | "mode": 0, 98 | "inputs": [], 99 | "outputs": [ 100 | { 101 | "name": "VAE", 102 | "type": "VAE", 103 | "links": [ 104 | 76 105 | ], 106 | "slot_index": 0 107 | } 108 | ], 109 | "properties": { 110 | "Node name for S&R": "VAELoader" 111 | }, 112 | "widgets_values": [ 113 | "wan_2.1_vae.safetensors" 114 | ] 115 | }, 116 | { 117 | "id": 8, 118 | "type": "VAEDecode", 119 | "pos": [ 120 | 1210, 121 | 190 122 | ], 123 | "size": [ 124 | 210, 125 | 46 126 | ], 127 | "flags": {}, 128 | "order": 11, 129 | "mode": 0, 130 | "inputs": [ 131 | { 132 | "name": "samples", 133 | "type": "LATENT", 134 | "link": 35 135 | }, 136 | { 137 | "name": "vae", 138 | "type": "VAE", 139 | "link": 76 140 | } 141 | ], 142 | "outputs": [ 143 | { 144 | "name": "IMAGE", 145 | "type": "IMAGE", 146 | "links": [ 147 | 56, 148 | 96 149 | ], 150 | "slot_index": 0 151 | } 152 | ], 153 | "properties": { 154 | "Node name for S&R": "VAEDecode" 155 | }, 156 | "widgets_values": [] 157 | }, 158 | { 159 | "id": 49, 160 | "type": "VHS_VideoCombine", 161 | "pos": [ 162 | 1542.4951171875, 163 | 7.454583644866943 164 | ], 165 | "size": [ 166 | 376.1499938964844, 167 | 680.1500244140625 168 | ], 169 | "flags": {}, 170 | "order": 13, 171 | "mode": 0, 172 | "inputs": [ 173 | { 174 | "name": "images", 175 | "type": "IMAGE", 176 | "link": 96 177 | }, 178 | { 179 | "name": "audio", 180 | "type": "AUDIO", 181 | "shape": 7, 182 | "link": null 183 | }, 184 | { 185 | "name": "meta_batch", 186 | "type": 
"VHS_BatchManager", 187 | "shape": 7, 188 | "link": null 189 | }, 190 | { 191 | "name": "vae", 192 | "type": "VAE", 193 | "shape": 7, 194 | "link": null 195 | } 196 | ], 197 | "outputs": [ 198 | { 199 | "name": "Filenames", 200 | "type": "VHS_FILENAMES", 201 | "links": null 202 | } 203 | ], 204 | "properties": { 205 | "Node name for S&R": "VHS_VideoCombine" 206 | }, 207 | "widgets_values": { 208 | "frame_rate": 16, 209 | "loop_count": 0, 210 | "filename_prefix": "wan/videos", 211 | "format": "video/h264-mp4", 212 | "pix_fmt": "yuv420p", 213 | "crf": 19, 214 | "save_metadata": true, 215 | "pingpong": false, 216 | "save_output": true, 217 | "videopreview": { 218 | "hidden": false, 219 | "paused": false, 220 | "params": { 221 | "filename": "videos_00005.mp4", 222 | "subfolder": "wan", 223 | "type": "output", 224 | "format": "video/h264-mp4", 225 | "frame_rate": 16, 226 | "workflow": "videos_00004.png", 227 | "fullpath": "/home/ti/ComfyUI/output/Hunyuan/videos_00004.mp4" 228 | }, 229 | "muted": false 230 | } 231 | } 232 | }, 233 | { 234 | "id": 51, 235 | "type": "Note", 236 | "pos": [ 237 | 20.718666076660156, 238 | 459.1051940917969 239 | ], 240 | "size": [ 241 | 234.0926055908203, 242 | 467.5384826660156 243 | ], 244 | "flags": {}, 245 | "order": 1, 246 | "mode": 0, 247 | "inputs": [], 248 | "outputs": [], 249 | "properties": { 250 | "text": "" 251 | }, 252 | "widgets_values": [ 253 | "\nYoutube channel: https://www.youtube.com/@tech-practice9805\n\n\n\nWan 2.1 text to video. \n\nFor mdoel download, see https://comfyanonymous.github.io/ComfyUI_examples/wan/\n\nCan also use the 14B model" 254 | ], 255 | "color": "#432", 256 | "bgcolor": "#653" 257 | }, 258 | { 259 | "id": 38, 260 | "type": "CLIPLoader", 261 | "pos": [ 262 | -24.413658142089844, 263 | 234.5161895751953 264 | ], 265 | "size": [ 266 | 390, 267 | 98 268 | ], 269 | "flags": {}, 270 | "order": 2, 271 | "mode": 0, 272 | "inputs": [], 273 | "outputs": [ 274 | { 275 | "name": "CLIP", 276 | "type": "CLIP", 277 | "links": [ 278 | 74, 279 | 75 280 | ], 281 | "slot_index": 0 282 | } 283 | ], 284 | "properties": { 285 | "Node name for S&R": "CLIPLoader" 286 | }, 287 | "widgets_values": [ 288 | "umt5_xxl_fp8_e4m3fn_scaled.safetensors", 289 | "wan", 290 | "default" 291 | ] 292 | }, 293 | { 294 | "id": 40, 295 | "type": "EmptyHunyuanLatentVideo", 296 | "pos": [ 297 | 520, 298 | 620 299 | ], 300 | "size": [ 301 | 315, 302 | 130 303 | ], 304 | "flags": {}, 305 | "order": 3, 306 | "mode": 0, 307 | "inputs": [], 308 | "outputs": [ 309 | { 310 | "name": "LATENT", 311 | "type": "LATENT", 312 | "links": [ 313 | 91 314 | ], 315 | "slot_index": 0 316 | } 317 | ], 318 | "properties": { 319 | "Node name for S&R": "EmptyHunyuanLatentVideo" 320 | }, 321 | "widgets_values": [ 322 | 512, 323 | 512, 324 | 33, 325 | 1 326 | ] 327 | }, 328 | { 329 | "id": 28, 330 | "type": "SaveAnimatedWEBP", 331 | "pos": [ 332 | 895.359619140625, 333 | 742.9389038085938 334 | ], 335 | "size": [ 336 | 870.8511352539062, 337 | 643.7430419921875 338 | ], 339 | "flags": {}, 340 | "order": 12, 341 | "mode": 0, 342 | "inputs": [ 343 | { 344 | "name": "images", 345 | "type": "IMAGE", 346 | "link": 56 347 | } 348 | ], 349 | "outputs": [], 350 | "properties": {}, 351 | "widgets_values": [ 352 | "ComfyUI", 353 | 16, 354 | false, 355 | 90, 356 | "default", 357 | "" 358 | ] 359 | }, 360 | { 361 | "id": 6, 362 | "type": "CLIPTextEncode", 363 | "pos": [ 364 | 415, 365 | 186 366 | ], 367 | "size": [ 368 | 422.84503173828125, 369 | 164.31304931640625 370 | ], 371 | "flags": {}, 372 | 
"order": 6, 373 | "mode": 0, 374 | "inputs": [ 375 | { 376 | "name": "clip", 377 | "type": "CLIP", 378 | "link": 74 379 | } 380 | ], 381 | "outputs": [ 382 | { 383 | "name": "CONDITIONING", 384 | "type": "CONDITIONING", 385 | "links": [ 386 | 46 387 | ], 388 | "slot_index": 0 389 | } 390 | ], 391 | "title": "CLIP Text Encode (Positive Prompt)", 392 | "properties": { 393 | "Node name for S&R": "CLIPTextEncode" 394 | }, 395 | "widgets_values": [ 396 | "a tiger moving quickly in a beautiful winter scenery nature trees mountains daytime tracking camera" 397 | ], 398 | "color": "#232", 399 | "bgcolor": "#353" 400 | }, 401 | { 402 | "id": 52, 403 | "type": "Note", 404 | "pos": [ 405 | 939.7559204101562, 406 | 42.57202911376953 407 | ], 408 | "size": [ 409 | 210, 410 | 88 411 | ], 412 | "flags": {}, 413 | "order": 4, 414 | "mode": 0, 415 | "inputs": [], 416 | "outputs": [], 417 | "properties": {}, 418 | "widgets_values": [ 419 | "the uni_pc sampler doesnot seem to work on Macbook. use others. " 420 | ], 421 | "color": "#432", 422 | "bgcolor": "#653" 423 | }, 424 | { 425 | "id": 37, 426 | "type": "UNETLoader", 427 | "pos": [ 428 | -165.18907165527344, 429 | -82.10840606689453 430 | ], 431 | "size": [ 432 | 346.7470703125, 433 | 82 434 | ], 435 | "flags": {}, 436 | "order": 5, 437 | "mode": 0, 438 | "inputs": [], 439 | "outputs": [ 440 | { 441 | "name": "MODEL", 442 | "type": "MODEL", 443 | "links": [ 444 | 97 445 | ], 446 | "slot_index": 0 447 | } 448 | ], 449 | "properties": { 450 | "Node name for S&R": "UNETLoader" 451 | }, 452 | "widgets_values": [ 453 | "wan2.1_t2v_1.3B_fp16.safetensors", 454 | "default" 455 | ] 456 | }, 457 | { 458 | "id": 53, 459 | "type": "TeaCache", 460 | "pos": [ 461 | 207.71543884277344, 462 | -176.64788818359375 463 | ], 464 | "size": [ 465 | 315, 466 | 106 467 | ], 468 | "flags": {}, 469 | "order": 8, 470 | "mode": 0, 471 | "inputs": [ 472 | { 473 | "name": "model", 474 | "type": "MODEL", 475 | "link": 97 476 | } 477 | ], 478 | "outputs": [ 479 | { 480 | "name": "model", 481 | "type": "MODEL", 482 | "links": [ 483 | 98 484 | ], 485 | "slot_index": 0 486 | } 487 | ], 488 | "properties": { 489 | "Node name for S&R": "TeaCache" 490 | }, 491 | "widgets_values": [ 492 | "wan2.1_t2v_1.3B", 493 | 0.4, 494 | 3 495 | ] 496 | }, 497 | { 498 | "id": 3, 499 | "type": "KSampler", 500 | "pos": [ 501 | 863, 502 | 187 503 | ], 504 | "size": [ 505 | 315, 506 | 262 507 | ], 508 | "flags": {}, 509 | "order": 10, 510 | "mode": 0, 511 | "inputs": [ 512 | { 513 | "name": "model", 514 | "type": "MODEL", 515 | "link": 95 516 | }, 517 | { 518 | "name": "positive", 519 | "type": "CONDITIONING", 520 | "link": 46 521 | }, 522 | { 523 | "name": "negative", 524 | "type": "CONDITIONING", 525 | "link": 52 526 | }, 527 | { 528 | "name": "latent_image", 529 | "type": "LATENT", 530 | "link": 91 531 | } 532 | ], 533 | "outputs": [ 534 | { 535 | "name": "LATENT", 536 | "type": "LATENT", 537 | "links": [ 538 | 35 539 | ], 540 | "slot_index": 0 541 | } 542 | ], 543 | "properties": { 544 | "Node name for S&R": "KSampler" 545 | }, 546 | "widgets_values": [ 547 | 387609183938926, 548 | "increment", 549 | 30, 550 | 6, 551 | "euler", 552 | "simple", 553 | 1 554 | ] 555 | } 556 | ], 557 | "links": [ 558 | [ 559 | 35, 560 | 3, 561 | 0, 562 | 8, 563 | 0, 564 | "LATENT" 565 | ], 566 | [ 567 | 46, 568 | 6, 569 | 0, 570 | 3, 571 | 1, 572 | "CONDITIONING" 573 | ], 574 | [ 575 | 52, 576 | 7, 577 | 0, 578 | 3, 579 | 2, 580 | "CONDITIONING" 581 | ], 582 | [ 583 | 56, 584 | 8, 585 | 0, 586 | 28, 587 | 0, 588 | "IMAGE" 589 
| ], 590 | [ 591 | 74, 592 | 38, 593 | 0, 594 | 6, 595 | 0, 596 | "CLIP" 597 | ], 598 | [ 599 | 75, 600 | 38, 601 | 0, 602 | 7, 603 | 0, 604 | "CLIP" 605 | ], 606 | [ 607 | 76, 608 | 39, 609 | 0, 610 | 8, 611 | 1, 612 | "VAE" 613 | ], 614 | [ 615 | 91, 616 | 40, 617 | 0, 618 | 3, 619 | 3, 620 | "LATENT" 621 | ], 622 | [ 623 | 95, 624 | 48, 625 | 0, 626 | 3, 627 | 0, 628 | "MODEL" 629 | ], 630 | [ 631 | 96, 632 | 8, 633 | 0, 634 | 49, 635 | 0, 636 | "IMAGE" 637 | ], 638 | [ 639 | 97, 640 | 37, 641 | 0, 642 | 53, 643 | 0, 644 | "MODEL" 645 | ], 646 | [ 647 | 98, 648 | 53, 649 | 0, 650 | 48, 651 | 0, 652 | "MODEL" 653 | ] 654 | ], 655 | "groups": [], 656 | "config": {}, 657 | "extra": { 658 | "ds": { 659 | "scale": 0.8140274938684025, 660 | "offset": [ 661 | 292.69622576627853, 662 | 175.16916277465572 663 | ] 664 | }, 665 | "node_versions": { 666 | "comfy-core": "0.3.14" 667 | }, 668 | "VHS_latentpreview": false, 669 | "VHS_latentpreviewrate": 0, 670 | "VHS_MetadataImage": true, 671 | "VHS_KeepIntermediate": true 672 | }, 673 | "version": 0.4 674 | } -------------------------------------------------------------------------------- /wan2.1_text2video_1.3B.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 51, 3 | "last_link_id": 96, 4 | "nodes": [ 5 | { 6 | "id": 7, 7 | "type": "CLIPTextEncode", 8 | "pos": [ 9 | 413, 10 | 389 11 | ], 12 | "size": [ 13 | 425.27801513671875, 14 | 180.6060791015625 15 | ], 16 | "flags": {}, 17 | "order": 7, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "clip", 22 | "type": "CLIP", 23 | "link": 75 24 | } 25 | ], 26 | "outputs": [ 27 | { 28 | "name": "CONDITIONING", 29 | "type": "CONDITIONING", 30 | "links": [ 31 | 52 32 | ], 33 | "slot_index": 0 34 | } 35 | ], 36 | "title": "CLIP Text Encode (Negative Prompt)", 37 | "properties": { 38 | "Node name for S&R": "CLIPTextEncode" 39 | }, 40 | "widgets_values": [ 41 | "色调艳丽,过曝,静态,细节模糊不清,字幕,风格,作品,画作,画面,静止,整体发灰,最差质量,低质量,JPEG压缩残留,丑陋的,残缺的,多余的手指,画得不好的手部,画得不好的脸部,畸形的,毁容的,形态畸形的肢体,手指融合,静止不动的画面,杂乱的背景,三条腿,背景人很多,倒着走" 42 | ], 43 | "color": "#322", 44 | "bgcolor": "#533" 45 | }, 46 | { 47 | "id": 40, 48 | "type": "EmptyHunyuanLatentVideo", 49 | "pos": [ 50 | 520, 51 | 620 52 | ], 53 | "size": [ 54 | 315, 55 | 130 56 | ], 57 | "flags": {}, 58 | "order": 0, 59 | "mode": 0, 60 | "inputs": [], 61 | "outputs": [ 62 | { 63 | "name": "LATENT", 64 | "type": "LATENT", 65 | "links": [ 66 | 91 67 | ], 68 | "slot_index": 0 69 | } 70 | ], 71 | "properties": { 72 | "Node name for S&R": "EmptyHunyuanLatentVideo" 73 | }, 74 | "widgets_values": [ 75 | 832, 76 | 480, 77 | 33, 78 | 1 79 | ] 80 | }, 81 | { 82 | "id": 3, 83 | "type": "KSampler", 84 | "pos": [ 85 | 863, 86 | 187 87 | ], 88 | "size": [ 89 | 315, 90 | 262 91 | ], 92 | "flags": {}, 93 | "order": 8, 94 | "mode": 0, 95 | "inputs": [ 96 | { 97 | "name": "model", 98 | "type": "MODEL", 99 | "link": 95 100 | }, 101 | { 102 | "name": "positive", 103 | "type": "CONDITIONING", 104 | "link": 46 105 | }, 106 | { 107 | "name": "negative", 108 | "type": "CONDITIONING", 109 | "link": 52 110 | }, 111 | { 112 | "name": "latent_image", 113 | "type": "LATENT", 114 | "link": 91 115 | } 116 | ], 117 | "outputs": [ 118 | { 119 | "name": "LATENT", 120 | "type": "LATENT", 121 | "links": [ 122 | 35 123 | ], 124 | "slot_index": 0 125 | } 126 | ], 127 | "properties": { 128 | "Node name for S&R": "KSampler" 129 | }, 130 | "widgets_values": [ 131 | 465073202943274, 132 | "randomize", 133 | 30, 134 | 6, 135 | "uni_pc", 136 | "simple", 137 | 1 138 | ] 
139 | }, 140 | { 141 | "id": 48, 142 | "type": "ModelSamplingSD3", 143 | "pos": [ 144 | 440, 145 | 50 146 | ], 147 | "size": [ 148 | 210, 149 | 58 150 | ], 151 | "flags": {}, 152 | "order": 5, 153 | "mode": 0, 154 | "inputs": [ 155 | { 156 | "name": "model", 157 | "type": "MODEL", 158 | "link": 94 159 | } 160 | ], 161 | "outputs": [ 162 | { 163 | "name": "MODEL", 164 | "type": "MODEL", 165 | "links": [ 166 | 95 167 | ], 168 | "slot_index": 0 169 | } 170 | ], 171 | "properties": { 172 | "Node name for S&R": "ModelSamplingSD3" 173 | }, 174 | "widgets_values": [ 175 | 8 176 | ] 177 | }, 178 | { 179 | "id": 37, 180 | "type": "UNETLoader", 181 | "pos": [ 182 | 20, 183 | 40 184 | ], 185 | "size": [ 186 | 346.7470703125, 187 | 82 188 | ], 189 | "flags": {}, 190 | "order": 1, 191 | "mode": 0, 192 | "inputs": [], 193 | "outputs": [ 194 | { 195 | "name": "MODEL", 196 | "type": "MODEL", 197 | "links": [ 198 | 94 199 | ], 200 | "slot_index": 0 201 | } 202 | ], 203 | "properties": { 204 | "Node name for S&R": "UNETLoader" 205 | }, 206 | "widgets_values": [ 207 | "wan2.1_t2v_1.3B_bf16.safetensors", 208 | "default" 209 | ] 210 | }, 211 | { 212 | "id": 39, 213 | "type": "VAELoader", 214 | "pos": [ 215 | 866.3932495117188, 216 | 499.18597412109375 217 | ], 218 | "size": [ 219 | 306.36004638671875, 220 | 58 221 | ], 222 | "flags": {}, 223 | "order": 2, 224 | "mode": 0, 225 | "inputs": [], 226 | "outputs": [ 227 | { 228 | "name": "VAE", 229 | "type": "VAE", 230 | "links": [ 231 | 76 232 | ], 233 | "slot_index": 0 234 | } 235 | ], 236 | "properties": { 237 | "Node name for S&R": "VAELoader" 238 | }, 239 | "widgets_values": [ 240 | "wan_2.1_vae.safetensors" 241 | ] 242 | }, 243 | { 244 | "id": 38, 245 | "type": "CLIPLoader", 246 | "pos": [ 247 | -24.413658142089844, 248 | 234.5161895751953 249 | ], 250 | "size": [ 251 | 390, 252 | 98 253 | ], 254 | "flags": {}, 255 | "order": 3, 256 | "mode": 0, 257 | "inputs": [], 258 | "outputs": [ 259 | { 260 | "name": "CLIP", 261 | "type": "CLIP", 262 | "links": [ 263 | 74, 264 | 75 265 | ], 266 | "slot_index": 0 267 | } 268 | ], 269 | "properties": { 270 | "Node name for S&R": "CLIPLoader" 271 | }, 272 | "widgets_values": [ 273 | "umt5_xxl_fp8_e4m3fn_scaled.safetensors", 274 | "wan", 275 | "default" 276 | ] 277 | }, 278 | { 279 | "id": 6, 280 | "type": "CLIPTextEncode", 281 | "pos": [ 282 | 415, 283 | 186 284 | ], 285 | "size": [ 286 | 422.84503173828125, 287 | 164.31304931640625 288 | ], 289 | "flags": {}, 290 | "order": 6, 291 | "mode": 0, 292 | "inputs": [ 293 | { 294 | "name": "clip", 295 | "type": "CLIP", 296 | "link": 74 297 | } 298 | ], 299 | "outputs": [ 300 | { 301 | "name": "CONDITIONING", 302 | "type": "CONDITIONING", 303 | "links": [ 304 | 46 305 | ], 306 | "slot_index": 0 307 | } 308 | ], 309 | "title": "CLIP Text Encode (Positive Prompt)", 310 | "properties": { 311 | "Node name for S&R": "CLIPTextEncode" 312 | }, 313 | "widgets_values": [ 314 | "a cat moving quickly in a beautiful winter scenery nature trees mountains daytime tracking camera" 315 | ], 316 | "color": "#232", 317 | "bgcolor": "#353" 318 | }, 319 | { 320 | "id": 8, 321 | "type": "VAEDecode", 322 | "pos": [ 323 | 1210, 324 | 190 325 | ], 326 | "size": [ 327 | 210, 328 | 46 329 | ], 330 | "flags": {}, 331 | "order": 9, 332 | "mode": 0, 333 | "inputs": [ 334 | { 335 | "name": "samples", 336 | "type": "LATENT", 337 | "link": 35 338 | }, 339 | { 340 | "name": "vae", 341 | "type": "VAE", 342 | "link": 76 343 | } 344 | ], 345 | "outputs": [ 346 | { 347 | "name": "IMAGE", 348 | "type": "IMAGE", 349 | 
"links": [ 350 | 56, 351 | 96 352 | ], 353 | "slot_index": 0 354 | } 355 | ], 356 | "properties": { 357 | "Node name for S&R": "VAEDecode" 358 | }, 359 | "widgets_values": [] 360 | }, 361 | { 362 | "id": 49, 363 | "type": "VHS_VideoCombine", 364 | "pos": [ 365 | 1542.4951171875, 366 | 7.454583644866943 367 | ], 368 | "size": [ 369 | 376.1499938964844, 370 | 334 371 | ], 372 | "flags": {}, 373 | "order": 11, 374 | "mode": 0, 375 | "inputs": [ 376 | { 377 | "name": "images", 378 | "type": "IMAGE", 379 | "link": 96 380 | }, 381 | { 382 | "name": "audio", 383 | "type": "AUDIO", 384 | "shape": 7, 385 | "link": null 386 | }, 387 | { 388 | "name": "meta_batch", 389 | "type": "VHS_BatchManager", 390 | "shape": 7, 391 | "link": null 392 | }, 393 | { 394 | "name": "vae", 395 | "type": "VAE", 396 | "shape": 7, 397 | "link": null 398 | } 399 | ], 400 | "outputs": [ 401 | { 402 | "name": "Filenames", 403 | "type": "VHS_FILENAMES", 404 | "links": null 405 | } 406 | ], 407 | "properties": { 408 | "Node name for S&R": "VHS_VideoCombine" 409 | }, 410 | "widgets_values": { 411 | "frame_rate": 16, 412 | "loop_count": 0, 413 | "filename_prefix": "wan/videos", 414 | "format": "video/h264-mp4", 415 | "pix_fmt": "yuv420p", 416 | "crf": 19, 417 | "save_metadata": true, 418 | "trim_to_audio": false, 419 | "pingpong": false, 420 | "save_output": true, 421 | "videopreview": { 422 | "hidden": false, 423 | "paused": false, 424 | "params": { 425 | "filename": "videos_00004.mp4", 426 | "subfolder": "Hunyuan", 427 | "type": "output", 428 | "format": "video/nvenc_h264-mp4", 429 | "frame_rate": 24, 430 | "workflow": "videos_00004.png", 431 | "fullpath": "/home/ti/ComfyUI/output/Hunyuan/videos_00004.mp4" 432 | }, 433 | "muted": false 434 | } 435 | } 436 | }, 437 | { 438 | "id": 28, 439 | "type": "SaveAnimatedWEBP", 440 | "pos": [ 441 | 1264.3070068359375, 442 | 419.1758117675781 443 | ], 444 | "size": [ 445 | 870.8511352539062, 446 | 643.7430419921875 447 | ], 448 | "flags": {}, 449 | "order": 10, 450 | "mode": 0, 451 | "inputs": [ 452 | { 453 | "name": "images", 454 | "type": "IMAGE", 455 | "link": 56 456 | } 457 | ], 458 | "outputs": [], 459 | "properties": {}, 460 | "widgets_values": [ 461 | "ComfyUI", 462 | 16, 463 | false, 464 | 90, 465 | "default" 466 | ] 467 | }, 468 | { 469 | "id": 51, 470 | "type": "Note", 471 | "pos": [ 472 | 20.718666076660156, 473 | 459.1051940917969 474 | ], 475 | "size": [ 476 | 234.0926055908203, 477 | 467.5384826660156 478 | ], 479 | "flags": {}, 480 | "order": 4, 481 | "mode": 0, 482 | "inputs": [], 483 | "outputs": [], 484 | "properties": { 485 | "text": "" 486 | }, 487 | "widgets_values": [ 488 | "\nYoutube channel: https://www.youtube.com/@tech-practice9805\n\n\n\nWan 2.1 text to video. 
\n\nFor mdoel download, see https://comfyanonymous.github.io/ComfyUI_examples/wan/\n\nCan also use the 14B model" 489 | ], 490 | "color": "#432", 491 | "bgcolor": "#653" 492 | } 493 | ], 494 | "links": [ 495 | [ 496 | 35, 497 | 3, 498 | 0, 499 | 8, 500 | 0, 501 | "LATENT" 502 | ], 503 | [ 504 | 46, 505 | 6, 506 | 0, 507 | 3, 508 | 1, 509 | "CONDITIONING" 510 | ], 511 | [ 512 | 52, 513 | 7, 514 | 0, 515 | 3, 516 | 2, 517 | "CONDITIONING" 518 | ], 519 | [ 520 | 56, 521 | 8, 522 | 0, 523 | 28, 524 | 0, 525 | "IMAGE" 526 | ], 527 | [ 528 | 74, 529 | 38, 530 | 0, 531 | 6, 532 | 0, 533 | "CLIP" 534 | ], 535 | [ 536 | 75, 537 | 38, 538 | 0, 539 | 7, 540 | 0, 541 | "CLIP" 542 | ], 543 | [ 544 | 76, 545 | 39, 546 | 0, 547 | 8, 548 | 1, 549 | "VAE" 550 | ], 551 | [ 552 | 91, 553 | 40, 554 | 0, 555 | 3, 556 | 3, 557 | "LATENT" 558 | ], 559 | [ 560 | 94, 561 | 37, 562 | 0, 563 | 48, 564 | 0, 565 | "MODEL" 566 | ], 567 | [ 568 | 95, 569 | 48, 570 | 0, 571 | 3, 572 | 0, 573 | "MODEL" 574 | ], 575 | [ 576 | 96, 577 | 8, 578 | 0, 579 | 49, 580 | 0, 581 | "IMAGE" 582 | ] 583 | ], 584 | "groups": [], 585 | "config": {}, 586 | "extra": { 587 | "ds": { 588 | "scale": 1.1167815779425243, 589 | "offset": [ 590 | 199.72222224303422, 591 | -201.28126708940837 592 | ] 593 | }, 594 | "node_versions": { 595 | "comfy-core": "0.3.14" 596 | }, 597 | "VHS_latentpreview": false, 598 | "VHS_latentpreviewrate": 0, 599 | "VHS_MetadataImage": true, 600 | "VHS_KeepIntermediate": true 601 | }, 602 | "version": 0.4 603 | } -------------------------------------------------------------------------------- /workflow_sdxl_lcm_lora.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 12, 3 | "last_link_id": 22, 4 | "nodes": [ 5 | { 6 | "id": 7, 7 | "type": "CLIPTextEncode", 8 | "pos": [ 9 | 413, 10 | 389 11 | ], 12 | "size": { 13 | "0": 425.27801513671875, 14 | "1": 180.6060791015625 15 | }, 16 | "flags": {}, 17 | "order": 3, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "clip", 22 | "type": "CLIP", 23 | "link": 5 24 | } 25 | ], 26 | "outputs": [ 27 | { 28 | "name": "CONDITIONING", 29 | "type": "CONDITIONING", 30 | "links": [ 31 | 6 32 | ], 33 | "slot_index": 0 34 | } 35 | ], 36 | "properties": { 37 | "Node name for S&R": "CLIPTextEncode" 38 | }, 39 | "widgets_values": [ 40 | "text, watermark" 41 | ] 42 | }, 43 | { 44 | "id": 6, 45 | "type": "CLIPTextEncode", 46 | "pos": [ 47 | 415, 48 | 186 49 | ], 50 | "size": { 51 | "0": 422.84503173828125, 52 | "1": 164.31304931640625 53 | }, 54 | "flags": {}, 55 | "order": 2, 56 | "mode": 0, 57 | "inputs": [ 58 | { 59 | "name": "clip", 60 | "type": "CLIP", 61 | "link": 3 62 | } 63 | ], 64 | "outputs": [ 65 | { 66 | "name": "CONDITIONING", 67 | "type": "CONDITIONING", 68 | "links": [ 69 | 4 70 | ], 71 | "slot_index": 0 72 | } 73 | ], 74 | "properties": { 75 | "Node name for S&R": "CLIPTextEncode" 76 | }, 77 | "widgets_values": [ 78 | "beautiful scenery nature glass bottle landscape, , purple galaxy bottle," 79 | ] 80 | }, 81 | { 82 | "id": 5, 83 | "type": "EmptyLatentImage", 84 | "pos": [ 85 | 473, 86 | 609 87 | ], 88 | "size": { 89 | "0": 315, 90 | "1": 106 91 | }, 92 | "flags": {}, 93 | "order": 0, 94 | "mode": 0, 95 | "outputs": [ 96 | { 97 | "name": "LATENT", 98 | "type": "LATENT", 99 | "links": [ 100 | 2 101 | ], 102 | "slot_index": 0 103 | } 104 | ], 105 | "properties": { 106 | "Node name for S&R": "EmptyLatentImage" 107 | }, 108 | "widgets_values": [ 109 | 1024, 110 | 1024, 111 | 1 112 | ] 113 | }, 114 | { 115 | "id": 8, 
116 | "type": "VAEDecode", 117 | "pos": [ 118 | 1209, 119 | 188 120 | ], 121 | "size": { 122 | "0": 210, 123 | "1": 46 124 | }, 125 | "flags": {}, 126 | "order": 7, 127 | "mode": 0, 128 | "inputs": [ 129 | { 130 | "name": "samples", 131 | "type": "LATENT", 132 | "link": 7 133 | }, 134 | { 135 | "name": "vae", 136 | "type": "VAE", 137 | "link": 20, 138 | "slot_index": 1 139 | } 140 | ], 141 | "outputs": [ 142 | { 143 | "name": "IMAGE", 144 | "type": "IMAGE", 145 | "links": [ 146 | 9 147 | ], 148 | "slot_index": 0 149 | } 150 | ], 151 | "properties": { 152 | "Node name for S&R": "VAEDecode" 153 | } 154 | }, 155 | { 156 | "id": 9, 157 | "type": "SaveImage", 158 | "pos": [ 159 | 1456, 160 | 190 161 | ], 162 | "size": { 163 | "0": 863.365478515625, 164 | "1": 931.8978881835938 165 | }, 166 | "flags": {}, 167 | "order": 8, 168 | "mode": 0, 169 | "inputs": [ 170 | { 171 | "name": "images", 172 | "type": "IMAGE", 173 | "link": 9 174 | } 175 | ], 176 | "properties": {}, 177 | "widgets_values": [ 178 | "bench/ComfyUI" 179 | ] 180 | }, 181 | { 182 | "id": 11, 183 | "type": "ModelSamplingDiscrete", 184 | "pos": [ 185 | 435, 186 | -64 187 | ], 188 | "size": { 189 | "0": 315, 190 | "1": 82 191 | }, 192 | "flags": {}, 193 | "order": 5, 194 | "mode": 0, 195 | "inputs": [ 196 | { 197 | "name": "model", 198 | "type": "MODEL", 199 | "link": 15 200 | } 201 | ], 202 | "outputs": [ 203 | { 204 | "name": "MODEL", 205 | "type": "MODEL", 206 | "links": [ 207 | 22 208 | ], 209 | "shape": 3, 210 | "slot_index": 0 211 | } 212 | ], 213 | "properties": { 214 | "Node name for S&R": "ModelSamplingDiscrete" 215 | }, 216 | "widgets_values": [ 217 | "lcm", 218 | false 219 | ] 220 | }, 221 | { 222 | "id": 4, 223 | "type": "CheckpointLoaderSimple", 224 | "pos": [ 225 | -329, 226 | 236 227 | ], 228 | "size": { 229 | "0": 315, 230 | "1": 98 231 | }, 232 | "flags": {}, 233 | "order": 1, 234 | "mode": 0, 235 | "outputs": [ 236 | { 237 | "name": "MODEL", 238 | "type": "MODEL", 239 | "links": [ 240 | 10 241 | ], 242 | "slot_index": 0 243 | }, 244 | { 245 | "name": "CLIP", 246 | "type": "CLIP", 247 | "links": [ 248 | 3, 249 | 5, 250 | 11 251 | ], 252 | "slot_index": 1 253 | }, 254 | { 255 | "name": "VAE", 256 | "type": "VAE", 257 | "links": [ 258 | 20 259 | ], 260 | "slot_index": 2 261 | } 262 | ], 263 | "properties": { 264 | "Node name for S&R": "CheckpointLoaderSimple" 265 | }, 266 | "widgets_values": [ 267 | "sd_xl_base_1.0.safetensors" 268 | ] 269 | }, 270 | { 271 | "id": 10, 272 | "type": "LoraLoader", 273 | "pos": [ 274 | 54, 275 | -43 276 | ], 277 | "size": { 278 | "0": 315, 279 | "1": 126 280 | }, 281 | "flags": {}, 282 | "order": 4, 283 | "mode": 0, 284 | "inputs": [ 285 | { 286 | "name": "model", 287 | "type": "MODEL", 288 | "link": 10 289 | }, 290 | { 291 | "name": "clip", 292 | "type": "CLIP", 293 | "link": 11 294 | } 295 | ], 296 | "outputs": [ 297 | { 298 | "name": "MODEL", 299 | "type": "MODEL", 300 | "links": [ 301 | 15 302 | ], 303 | "shape": 3, 304 | "slot_index": 0 305 | }, 306 | { 307 | "name": "CLIP", 308 | "type": "CLIP", 309 | "links": null, 310 | "shape": 3, 311 | "slot_index": 1 312 | } 313 | ], 314 | "properties": { 315 | "Node name for S&R": "LoraLoader" 316 | }, 317 | "widgets_values": [ 318 | "pytorch_lora_weights_sdxl.safetensors", 319 | 1, 320 | 1 321 | ] 322 | }, 323 | { 324 | "id": 3, 325 | "type": "KSampler", 326 | "pos": [ 327 | 863, 328 | 186 329 | ], 330 | "size": { 331 | "0": 315, 332 | "1": 262 333 | }, 334 | "flags": {}, 335 | "order": 6, 336 | "mode": 0, 337 | "inputs": [ 338 | { 339 | 
"name": "model", 340 | "type": "MODEL", 341 | "link": 22 342 | }, 343 | { 344 | "name": "positive", 345 | "type": "CONDITIONING", 346 | "link": 4 347 | }, 348 | { 349 | "name": "negative", 350 | "type": "CONDITIONING", 351 | "link": 6 352 | }, 353 | { 354 | "name": "latent_image", 355 | "type": "LATENT", 356 | "link": 2 357 | } 358 | ], 359 | "outputs": [ 360 | { 361 | "name": "LATENT", 362 | "type": "LATENT", 363 | "links": [ 364 | 7 365 | ], 366 | "slot_index": 0 367 | } 368 | ], 369 | "properties": { 370 | "Node name for S&R": "KSampler" 371 | }, 372 | "widgets_values": [ 373 | 344358515523812, 374 | "randomize", 375 | 4, 376 | 1.8, 377 | "lcm", 378 | "sgm_uniform", 379 | 1 380 | ] 381 | } 382 | ], 383 | "links": [ 384 | [ 385 | 2, 386 | 5, 387 | 0, 388 | 3, 389 | 3, 390 | "LATENT" 391 | ], 392 | [ 393 | 3, 394 | 4, 395 | 1, 396 | 6, 397 | 0, 398 | "CLIP" 399 | ], 400 | [ 401 | 4, 402 | 6, 403 | 0, 404 | 3, 405 | 1, 406 | "CONDITIONING" 407 | ], 408 | [ 409 | 5, 410 | 4, 411 | 1, 412 | 7, 413 | 0, 414 | "CLIP" 415 | ], 416 | [ 417 | 6, 418 | 7, 419 | 0, 420 | 3, 421 | 2, 422 | "CONDITIONING" 423 | ], 424 | [ 425 | 7, 426 | 3, 427 | 0, 428 | 8, 429 | 0, 430 | "LATENT" 431 | ], 432 | [ 433 | 9, 434 | 8, 435 | 0, 436 | 9, 437 | 0, 438 | "IMAGE" 439 | ], 440 | [ 441 | 10, 442 | 4, 443 | 0, 444 | 10, 445 | 0, 446 | "MODEL" 447 | ], 448 | [ 449 | 11, 450 | 4, 451 | 1, 452 | 10, 453 | 1, 454 | "CLIP" 455 | ], 456 | [ 457 | 15, 458 | 10, 459 | 0, 460 | 11, 461 | 0, 462 | "MODEL" 463 | ], 464 | [ 465 | 20, 466 | 4, 467 | 2, 468 | 8, 469 | 1, 470 | "VAE" 471 | ], 472 | [ 473 | 22, 474 | 11, 475 | 0, 476 | 3, 477 | 0, 478 | "MODEL" 479 | ] 480 | ], 481 | "groups": [], 482 | "config": {}, 483 | "extra": {}, 484 | "version": 0.4 485 | } --------------------------------------------------------------------------------