├── .dockerignore ├── .gitignore ├── .gitmodules ├── LICENSE ├── README.md ├── cog.yaml ├── helpers ├── ComfyUI_Controlnet_Aux.py ├── ComfyUI_IPAdapter_plus.py └── comfyui.py ├── material_transfer_api.json ├── material_transfer_ui.json ├── predict.py ├── scripts ├── clone_plugins.sh └── reset.sh ├── weights.json ├── weights_downloader.py └── weights_manifest.py /.dockerignore: -------------------------------------------------------------------------------- 1 | **/__pycache__ 2 | **/.git 3 | **/.github 4 | **/.ci 5 | 6 | # Outputs 7 | *.jpg 8 | *.png 9 | *.gif 10 | *.mp4 11 | *.zip 12 | *.webp 13 | *.webm 14 | 15 | # Models 16 | *.ckpt 17 | *.safetensors 18 | *.pth 19 | *.bin 20 | *.onnx 21 | *.torchscript 22 | # .pt files are used by efficiency-nodes-comfyui 23 | 24 | # Files 25 | scripts/* 26 | updated_weights.json 27 | 28 | # Extension files 29 | *.ipynb 30 | *.bat 31 | 32 | # ComfyUI 33 | ComfyUI/venv 34 | ComfyUI/temp 35 | ComfyUI/user 36 | ComfyUI/models 37 | ComfyUI/custom_nodes/comfyUI_controlnet_aux/ckpts 38 | ComfyUI/custom_nodes/ComfyUI-AnimateDiff-Evolved/models 39 | ComfyUI/custom_nodes/ComfyUI-AnimateDiff-Evolved/motion_lora 40 | ComfyUI/custom_nodes/efficiency-nodes-comfyui/images 41 | 42 | # ComfyUI bits we just don’t need 43 | ComfyUI/tests 44 | ComfyUI/tests-ui 45 | ComfyUI/notebooks 46 | ComfyUI/script_examples 47 | ComfyUI/comfyui_screenshot.png 48 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.png 3 | *.jpg 4 | *.gif 5 | *.mp4 6 | *.zip 7 | *.ckpt 8 | *.safetensors 9 | *.pth 10 | *.bin 11 | *.torchscript 12 | *.webp 13 | weights.txt 14 | manifest.txt 15 | updated_weights.json 16 | .cog 17 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "ComfyUI"] 2 | path = ComfyUI 3 | url = https://github.com/comfyanonymous/ComfyUI.git 4 | commit = eecd69b53a896343775bcb02a4f8349e7442ffd1 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 fofr 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # cog-material-transfer 2 | 3 | - Based on the ComfyUI workflow by [DreamStarter](https://github.com/iordcalin/Comfy_Workflows) 4 | - Using [ZeST (Zero-Shot Material Transfer from a Single Image)](https://github.com/ttchengab/zest_code/) in [a custom ComfyUI node](https://github.com/kealiu/ComfyUI-ZeroShot-MTrans) 5 | 6 | Run it on Replicate: 7 | 8 | https://replicate.com/iordcalin/material-transfer 9 | 10 | Or run the `material_transfer_ui.json` workflow in your own ComfyUI instance. See [this Twitter thread](https://twitter.com/DreamStarter_1/status/1784962492822679782) for help setting it up. 11 | -------------------------------------------------------------------------------- /cog.yaml: -------------------------------------------------------------------------------- 1 | build: 2 | gpu: true 3 | cuda: "12.1" 4 | system_packages: 5 | - ffmpeg 6 | python_version: "3.10.6" 7 | python_packages: 8 | - torch 9 | - torchvision 10 | - torchaudio 11 | - torchsde 12 | - einops 13 | - transformers>=4.25.1 14 | - safetensors>=0.3.0 15 | - aiohttp 16 | - accelerate 17 | - pyyaml 18 | - Pillow 19 | - scipy 20 | - tqdm 21 | - psutil 22 | - kornia>=0.7.1 23 | - websocket-client==1.6.3 24 | 25 | # layerdiffuse 26 | - diffusers>=0.25.0 27 | 28 | # fix for pydantic issues in cog 29 | # https://github.com/replicate/cog/issues/1623 30 | - albumentations==1.4.3 31 | 32 | # was-node-suite-comfyui 33 | # https://github.com/WASasquatch/was-node-suite-comfyui/blob/main/requirements.txt 34 | - cmake 35 | - imageio 36 | - joblib 37 | - matplotlib 38 | - pilgram 39 | - scikit-learn 40 | - rembg 41 | 42 | # ComfyUI_essentials 43 | - numba 44 | 45 | # ComfyUI_FizzNodes 46 | - pandas 47 | - numexpr 48 | 49 | # comfyui-reactor-node 50 | - insightface 51 | - onnx 52 | 53 | # ComfyUI-Impact-Pack 54 | - segment-anything 55 | - piexif 56 | 57 | # ComfyUI-Impact-Subpack 58 | - ultralytics!=8.0.177 59 | 60 | # comfyui_segment_anything 61 | - timm 62 | 63 | # comfyui_controlnet_aux 64 | # https://github.com/Fannovel16/comfyui_controlnet_aux/blob/main/requirements.txt 65 | - importlib_metadata 66 | - opencv-python-headless>=4.0.1.24 67 | - filelock 68 | - numpy 69 | - einops 70 | - pyyaml 71 | - scikit-image 72 | - python-dateutil 73 | - mediapipe 74 | - svglib 75 | - fvcore 76 | - yapf 77 | - omegaconf 78 | - ftfy 79 | - addict 80 | - yacs 81 | - trimesh[easy] 82 | 83 | # ComfyUI-KJNodes 84 | - librosa 85 | - color-matcher 86 | run: 87 | - curl -o /usr/local/bin/pget -L "https://github.com/replicate/pget/releases/download/v0.6.0/pget_linux_x86_64" && chmod +x /usr/local/bin/pget 88 | - pip install onnxruntime-gpu --extra-index-url https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-12/pypi/simple/ 89 | predict: "predict.py:Predictor" 90 | -------------------------------------------------------------------------------- /helpers/ComfyUI_Controlnet_Aux.py: -------------------------------------------------------------------------------- 1 | MODELS = { 2 | "UNet.pth": "bdsqlsz/qinglong_controlnet-lllite/Annotators", 3 | "mobile_sam.pt": "dhkim2810/MobileSAM", 4 | "hrnetv2_w64_imagenet_pretrained.pth": "hr16/ControlNet-HandRefiner-pruned", 5 | "graphormer_hand_state_dict.bin": "hr16/ControlNet-HandRefiner-pruned", 6 | "rtmpose-m_ap10k_256_bs5.torchscript.pt": "hr16/DWPose-TorchScript-BatchSize5", 7 | 
"dw-ll_ucoco_384_bs5.torchscript.pt": "hr16/DWPose-TorchScript-BatchSize5", 8 | "rtmpose-m_ap10k_256.onnx": "hr16/UnJIT-DWPose", 9 | "yolo_nas_s_fp16.onnx": "hr16/yolo-nas-fp16", 10 | "yolo_nas_m_fp16.onnx": "hr16/yolo-nas-fp16", 11 | "yolox_l.torchscript.pt": "hr16/yolox-onnx", 12 | "densepose_r101_fpn_dl.torchscript": "LayerNorm/DensePose-TorchScript-with-hint-image", 13 | "densepose_r50_fpn_dl.torchscript": "LayerNorm/DensePose-TorchScript-with-hint-image", 14 | "mlsd_large_512_fp32.pth": "lllyasviel/Annotators", 15 | "150_16_swin_l_oneformer_coco_100ep.pth": "lllyasviel/Annotators", 16 | "ControlNetHED.pth": "lllyasviel/Annotators", 17 | "ZoeD_M12_N.pt": "lllyasviel/Annotators", 18 | "scannet.pt": "lllyasviel/Annotators", 19 | "hand_pose_model.pth": "lllyasviel/Annotators", 20 | "upernet_global_small.pth": "lllyasviel/Annotators", 21 | "latest_net_G.pth": "lllyasviel/Annotators", 22 | "netG.pth": "lllyasviel/Annotators", 23 | "sk_model2.pth": "lllyasviel/Annotators", 24 | "dpt_hybrid-midas-501f0c75.pt": "lllyasviel/Annotators", 25 | "table5_pidinet.pth": "lllyasviel/Annotators", 26 | "erika.pth": "lllyasviel/Annotators", 27 | "250_16_swin_l_oneformer_ade20k_160k.pth": "lllyasviel/Annotators", 28 | "sk_model.pth": "lllyasviel/Annotators", 29 | "body_pose_model.pth": "lllyasviel/Annotators", 30 | "res101.pth": "lllyasviel/Annotators", 31 | "facenet.pth": "lllyasviel/Annotators", 32 | "isnetis.ckpt": "skytnt/anime-seg", 33 | "yolox_l.onnx": "yzd-v/DWPose", 34 | "dw-ll_ucoco_384.onnx": "yzd-v/DWPose", 35 | "7_model.pth": "bdsqlsz/qinglong_controlnet-lllite/Annotators", 36 | "gmflow-scale1-mixdata.pth": "hr16/Unimatch", 37 | "gmflow-scale2-mixdata.pth": "hr16/Unimatch", 38 | "gmflow-scale2-regrefine6-mixdata.pth": "hr16/Unimatch", 39 | "depth_anything_vitl14.pth": "LiheYoung/Depth-Anything/checkpoints", 40 | "depth_anything_vitb14.pth": "LiheYoung/Depth-Anything/checkpoints", 41 | "depth_anything_vits14.pth": "LiheYoung/Depth-Anything/checkpoints", 42 | "diffusion_edge_indoor.pt": "hr16/Diffusion-Edge", 43 | "diffusion_edge_natrual.pt": "hr16/Diffusion-Edge", # (model has a typo) 44 | "diffusion_edge_urban.pt": "hr16/Diffusion-Edge", 45 | "dsine.pt": "hr16/Diffusion-Edge", 46 | "swin_b-68c6b09e.pth": "torch", 47 | "vgg16-397923af.pth": "torch", 48 | "depth_anything_metric_depth_indoor.pt": "LiheYoung/Depth-Anything/checkpoints_metric_depth", 49 | "depth_anything_metric_depth_outdoor.pt": "LiheYoung/Depth-Anything/checkpoints_metric_depth", 50 | } 51 | 52 | 53 | class ComfyUI_Controlnet_Aux: 54 | @staticmethod 55 | def models(): 56 | return MODELS 57 | 58 | @staticmethod 59 | def weights_map(base_url): 60 | return { 61 | key: { 62 | "url": f"{base_url}/custom_nodes/comfyui_controlnet_aux/{key}.tar", 63 | "dest": f"ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/{MODELS[key]}", 64 | } 65 | for key in MODELS 66 | } 67 | 68 | # Controlnet preprocessor models are not included in the API JSON 69 | # We need to add them manually based on the nodes being used to 70 | # avoid them being downloaded automatically from elsewhere 71 | @staticmethod 72 | def node_class_mapping(): 73 | return { 74 | # Depth 75 | "MiDaS-NormalMapPreprocessor": "dpt_hybrid-midas-501f0c75.pt", 76 | "MiDaS-DepthMapPreprocessor": "dpt_hybrid-midas-501f0c75.pt", 77 | "Zoe-DepthMapPreprocessor": "ZoeD_M12_N.pt", 78 | "LeReS-DepthMapPreprocessor": ["res101.pth", "latest_net_G.pth"], 79 | "MeshGraphormer-DepthMapPreprocessor": [ 80 | "hrnetv2_w64_imagenet_pretrained.pth", 81 | "graphormer_hand_state_dict.bin", 82 | ], 83 | 
"DepthAnythingPreprocessor": [ 84 | "depth_anything_vitl14.pth", 85 | "depth_anything_vitb14.pth", 86 | "depth_anything_vits14.pth", 87 | ], 88 | "Zoe_DepthAnythingPreprocessor": [ 89 | "depth_anything_metric_depth_indoor.pt", 90 | "depth_anything_metric_depth_outdoor.pt", 91 | ], 92 | # Segmentation 93 | "BAE-NormalMapPreprocessor": "scannet.pt", 94 | "OneFormer-COCO-SemSegPreprocessor": "150_16_swin_l_oneformer_coco_100ep.pth", 95 | "OneFormer-ADE20K-SemSegPreprocessor": "250_16_swin_l_oneformer_ade20k_160k.pth", 96 | "UniFormer-SemSegPreprocessor": "upernet_global_small.pth", 97 | "SemSegPreprocessor": "upernet_global_small.pth", 98 | "AnimeFace_SemSegPreprocessor": ["UNet.pth", "isnetis.ckpt"], 99 | "SAMPreprocessor": "mobile_sam.pt", 100 | "DSINE-NormalMapPreprocessor": "dsine.pt", 101 | # Line extractors 102 | "AnimeLineArtPreprocessor": "netG.pth", 103 | "HEDPreprocessor": "ControlNetHED.pth", 104 | "FakeScribblePreprocessor": "ControlNetHED.pth", 105 | "M-LSDPreprocessor": "mlsd_large_512_fp32.pth", 106 | "PiDiNetPreprocessor": "table5_pidinet.pth", 107 | "LineArtPreprocessor": ["sk_model.pth", "sk_model2.pth"], 108 | "Manga2Anime_LineArt_Preprocessor": "erika.pth", 109 | "TEEDPreprocessor": "7_model.pth", 110 | "DiffusionEdge_Preprocessor": [ 111 | "diffusion_edge_indoor.pt", 112 | "diffusion_edge_natrual.pt", # model has a typo 113 | "diffusion_edge_urban.pt", 114 | "vgg16-397923af.pth", 115 | "swin_b-68c6b09e.pth", 116 | ], 117 | # Pose 118 | "OpenposePreprocessor": [ 119 | "body_pose_model.pth", 120 | "hand_pose_model.pth", 121 | "facenet.pth", 122 | ], 123 | # Optical flow 124 | "Unimatch_OptFlowPreprocessor": [ 125 | "gmflow-scale1-mixdata.pth", 126 | "gmflow-scale2-mixdata.pth", 127 | "gmflow-scale2-regrefine6-mixdata.pth", 128 | ], 129 | } 130 | 131 | @staticmethod 132 | def add_weights(weights_to_download, node): 133 | node_class = node.get("class_type") 134 | node_mapping = ComfyUI_Controlnet_Aux.node_class_mapping() 135 | 136 | if node_class and node_class in node_mapping: 137 | class_weights = node_mapping[node_class] 138 | weights_to_download.extend( 139 | class_weights if isinstance(class_weights, list) else [class_weights] 140 | ) 141 | 142 | # Additional check for AIO_Preprocessor and its preprocessor input value 143 | if node_class == "AIO_Preprocessor" and "preprocessor" in node.get( 144 | "inputs", {} 145 | ): 146 | preprocessor = node["inputs"]["preprocessor"] 147 | if preprocessor in node_mapping: 148 | preprocessor_weights = node_mapping[preprocessor] 149 | weights_to_download.extend( 150 | preprocessor_weights 151 | if isinstance(preprocessor_weights, list) 152 | else [preprocessor_weights] 153 | ) 154 | -------------------------------------------------------------------------------- /helpers/ComfyUI_IPAdapter_plus.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | # List of presets 4 | PRESETS = [ 5 | # IPAdapterUnifiedLoader 6 | "LIGHT - SD1.5 only (low strength)", 7 | "STANDARD (medium strength)", 8 | "VIT-G (medium strength)", 9 | "PLUS (high strength)", 10 | "PLUS FACE (portraits)", 11 | "FULL FACE - SD1.5 only (portraits stronger)", 12 | 13 | # IPAdapterUnifiedLoaderFaceID 14 | "FACEID", 15 | "FACEID PLUS - SD1.5 only", 16 | "FACEID PLUS V2", 17 | "FACEID PORTRAIT (style transfer)", 18 | "FACEID PORTRAIT UNNORM - SDXL only (strong)", 19 | 20 | # IPAdapterUnifiedLoaderCommunity 21 | "Composition", 22 | ] 23 | 24 | 25 | class ComfyUI_IPAdapter_plus: 26 | @staticmethod 27 | def prepare(): 28 | # 
create the ipadapter folder in ComfyUI/models/ipadapter
29 |         # if it doesn't exist at setup time, the plugin falls back to the base directory
30 |         # and won't look for our ipadapters that are downloaded on demand
31 |         if not os.path.exists("ComfyUI/models/ipadapter"):
32 |             os.makedirs("ComfyUI/models/ipadapter")
33 | 
34 |     @staticmethod
35 |     def get_preset_weights(preset):
36 |         is_insightface = False
37 |         weights_to_add = []
38 | 
39 |         # clipvision
40 |         if preset.startswith("VIT-G"):
41 |             weights_to_add.append("CLIP-ViT-bigG-14-laion2B-39B-b160k.safetensors")
42 |         else:
43 |             weights_to_add.append("CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors")
44 | 
45 |         # ipadapters
46 |         if preset.startswith("LIGHT"):
47 |             weights_to_add.append("ip-adapter_sd15_light_v11.bin")
48 | 
49 |         if preset.startswith("STANDARD"):
50 |             weights_to_add.extend(
51 |                 ["ip-adapter_sd15.safetensors", "ip-adapter_sdxl_vit-h.safetensors"]
52 |             )
53 | 
54 |         if preset.startswith("VIT-G"):
55 |             weights_to_add.extend(
56 |                 ["ip-adapter_sd15_vit-G.safetensors", "ip-adapter_sdxl.safetensors"]
57 |             )
58 | 
59 |         if preset.startswith("PLUS ("):
60 |             weights_to_add.extend(
61 |                 [
62 |                     "ip-adapter-plus_sd15.safetensors",
63 |                     "ip-adapter-plus_sdxl_vit-h.safetensors",
64 |                 ]
65 |             )
66 | 
67 |         if preset.startswith("PLUS FACE"):
68 |             weights_to_add.extend(
69 |                 [
70 |                     "ip-adapter-plus-face_sd15.safetensors",
71 |                     "ip-adapter-plus-face_sdxl_vit-h.safetensors",
72 |                 ]
73 |             )
74 | 
75 |         if preset.startswith("FULL FACE"):
76 |             weights_to_add.append("ip-adapter-full-face_sd15.safetensors")
77 | 
78 |         if preset == "FACEID":
79 |             is_insightface = True
80 |             weights_to_add.extend(
81 |                 [
82 |                     "ip-adapter-faceid_sd15.bin",
83 |                     "ip-adapter-faceid_sdxl.bin",
84 |                     "ip-adapter-faceid_sd15_lora.safetensors",
85 |                     "ip-adapter-faceid_sdxl_lora.safetensors",
86 |                 ]
87 |             )
88 | 
89 |         if preset.startswith("FACEID PORTRAIT UNNORM"):
90 |             is_insightface = True
91 |             weights_to_add.extend(
92 |                 [
93 |                     "ip-adapter-faceid-portrait-unnorm_sdxl.bin",
94 |                 ]
95 |             )
96 | 
97 |         if preset.startswith("FACEID PORTRAIT ("):
98 |             is_insightface = True
99 |             weights_to_add.extend(
100 |                 [
101 |                     "ip-adapter-faceid-portrait-v11_sd15.bin",
102 |                     "ip-adapter-faceid-portrait_sdxl.bin",
103 |                 ]
104 |             )
105 | 
106 |         if preset.startswith("FACEID PLUS - "):
107 |             is_insightface = True
108 |             weights_to_add.extend(
109 |                 [
110 |                     "ip-adapter-faceid-plus_sd15.bin",
111 |                     "ip-adapter-faceid-plus_sd15_lora.safetensors",
112 |                 ]
113 |             )
114 | 
115 |         if preset.startswith("FACEID PLUS V2"):
116 |             is_insightface = True
117 |             weights_to_add.extend(
118 |                 [
119 |                     "ip-adapter-faceid-plusv2_sd15.bin",
120 |                     "ip-adapter-faceid-plusv2_sdxl.bin",
121 |                     "ip-adapter-faceid-plusv2_sd15_lora.safetensors",
122 |                     "ip-adapter-faceid-plusv2_sdxl_lora.safetensors",
123 |                 ]
124 |             )
125 | 
126 |         if preset.startswith("Composition"):
127 |             weights_to_add.extend(
128 |                 [
129 |                     "ip_plus_composition_sd15.safetensors",
130 |                     "ip_plus_composition_sdxl.safetensors",
131 |                 ]
132 |             )
133 | 
134 |         if is_insightface:
135 |             weights_to_add.append("models/buffalo_l")
136 | 
137 |         return weights_to_add
138 | 
139 |     @staticmethod
140 |     def add_weights(weights_to_download, node):
141 |         if "class_type" in node and node["class_type"] in [
142 |             "IPAdapterUnifiedLoader",
143 |             "IPAdapterUnifiedLoaderFaceID",
144 |             "IPAdapterUnifiedLoaderCommunity",
145 |         ]:
146 |             preset = node["inputs"]["preset"]
147 |             print(f"Including weights for IPAdapter preset: {preset}")
148 |             if preset:
149 |                 weights_to_download.extend(
150 | 
ComfyUI_IPAdapter_plus.get_preset_weights(preset) 151 | ) 152 | elif "class_type" in node and node["class_type"] in [ 153 | "IPAdapterInsightFaceLoader", 154 | ]: 155 | weights_to_download.append("models/buffalo_l") 156 | -------------------------------------------------------------------------------- /helpers/comfyui.py: -------------------------------------------------------------------------------- 1 | import os 2 | import urllib.request 3 | import subprocess 4 | import threading 5 | import time 6 | import json 7 | import urllib 8 | import uuid 9 | import json 10 | import os 11 | import websocket 12 | import random 13 | from weights_downloader import WeightsDownloader 14 | from urllib.error import URLError 15 | import requests 16 | 17 | 18 | # custom_nodes helpers 19 | from helpers.ComfyUI_IPAdapter_plus import ComfyUI_IPAdapter_plus 20 | from helpers.ComfyUI_Controlnet_Aux import ComfyUI_Controlnet_Aux 21 | 22 | 23 | class ComfyUI: 24 | def __init__(self, server_address): 25 | self.weights_downloader = WeightsDownloader() 26 | self.server_address = server_address 27 | ComfyUI_IPAdapter_plus.prepare() 28 | 29 | def start_server(self, output_directory, input_directory): 30 | self.input_directory = input_directory 31 | self.output_directory = output_directory 32 | 33 | self.download_pre_start_models() 34 | 35 | server_thread = threading.Thread( 36 | target=self.run_server, args=(output_directory, input_directory) 37 | ) 38 | server_thread.start() 39 | 40 | start_time = time.time() 41 | while not self.is_server_running(): 42 | if time.time() - start_time > 60: # If more than a minute has passed 43 | raise TimeoutError("Server did not start within 60 seconds") 44 | time.sleep(1) # Wait for 1 second before checking again 45 | 46 | print("Server running") 47 | 48 | def run_server(self, output_directory, input_directory): 49 | command = f"python ./ComfyUI/main.py --output-directory {output_directory} --input-directory {input_directory} --disable-metadata --gpu-only" 50 | server_process = subprocess.Popen(command, shell=True) 51 | server_process.wait() 52 | 53 | def is_server_running(self): 54 | try: 55 | with urllib.request.urlopen( 56 | "http://{}/history/{}".format(self.server_address, "123") 57 | ) as response: 58 | return response.status == 200 59 | except URLError: 60 | return False 61 | 62 | def download_pre_start_models(self): 63 | # Some models need to be downloaded and loaded before starting ComfyUI 64 | self.weights_downloader.download_torch_checkpoints() 65 | 66 | def handle_weights(self, workflow): 67 | print("Checking weights") 68 | embeddings = self.weights_downloader.get_weights_by_type("EMBEDDINGS") 69 | embedding_to_fullname = {emb.split(".")[0]: emb for emb in embeddings} 70 | weights_to_download = [] 71 | weights_filetypes = self.weights_downloader.supported_filetypes 72 | 73 | for node in workflow.values(): 74 | for handler in [ 75 | ComfyUI_Controlnet_Aux, 76 | ComfyUI_IPAdapter_plus, 77 | ]: 78 | handler.add_weights(weights_to_download, node) 79 | 80 | if "inputs" in node: 81 | for input in node["inputs"].values(): 82 | if isinstance(input, str): 83 | if any(key in input for key in embedding_to_fullname): 84 | weights_to_download.extend( 85 | embedding_to_fullname[key] 86 | for key in embedding_to_fullname 87 | if key in input 88 | ) 89 | elif any(input.endswith(ft) for ft in weights_filetypes): 90 | weights_to_download.append(input) 91 | 92 | weights_to_download = list(set(weights_to_download)) 93 | 94 | for weight in weights_to_download: 95 | 
self.weights_downloader.download_weights(weight) 96 | print(f"✅ {weight}") 97 | 98 | print("====================================") 99 | 100 | def is_image_or_video_value(self, value): 101 | filetypes = [".png", ".jpg", ".jpeg", ".webp", ".mp4", ".webm"] 102 | return isinstance(value, str) and any( 103 | value.lower().endswith(ft) for ft in filetypes 104 | ) 105 | 106 | def handle_inputs(self, workflow): 107 | print("Checking inputs") 108 | seen_inputs = set() 109 | for node in workflow.values(): 110 | if "inputs" in node: 111 | for input_key, input_value in node["inputs"].items(): 112 | if isinstance(input_value, str) and input_value not in seen_inputs: 113 | seen_inputs.add(input_value) 114 | if input_value.startswith(("http://", "https://")): 115 | filename = os.path.join( 116 | self.input_directory, os.path.basename(input_value) 117 | ) 118 | if not os.path.exists(filename): 119 | print(f"Downloading {input_value} to {filename}") 120 | try: 121 | response = requests.get(input_value) 122 | response.raise_for_status() 123 | with open(filename, "wb") as file: 124 | file.write(response.content) 125 | node["inputs"][input_key] = filename 126 | print(f"✅ {filename}") 127 | except requests.exceptions.RequestException as e: 128 | print(f"❌ Error downloading {input_value}: {e}") 129 | 130 | elif self.is_image_or_video_value(input_value): 131 | filename = os.path.join( 132 | self.input_directory, os.path.basename(input_value) 133 | ) 134 | if not os.path.exists(filename): 135 | print(f"❌ {filename} not provided") 136 | else: 137 | print(f"✅ {filename}") 138 | 139 | print("====================================") 140 | 141 | def connect(self): 142 | self.client_id = str(uuid.uuid4()) 143 | self.ws = websocket.WebSocket() 144 | self.ws.connect(f"ws://{self.server_address}/ws?clientId={self.client_id}") 145 | 146 | def post_request(self, endpoint, data=None): 147 | url = f"http://{self.server_address}{endpoint}" 148 | headers = {"Content-Type": "application/json"} if data else {} 149 | json_data = json.dumps(data).encode("utf-8") if data else None 150 | req = urllib.request.Request( 151 | url, data=json_data, headers=headers, method="POST" 152 | ) 153 | with urllib.request.urlopen(req) as response: 154 | if response.status != 200: 155 | print(f"Failed: {endpoint}, status code: {response.status}") 156 | 157 | # https://github.com/comfyanonymous/ComfyUI/blob/master/server.py 158 | def clear_queue(self): 159 | self.post_request("/queue", {"clear": True}) 160 | self.post_request("/interrupt") 161 | 162 | def queue_prompt(self, prompt): 163 | try: 164 | # Prompt is the loaded workflow (prompt is the label comfyUI uses) 165 | p = {"prompt": prompt, "client_id": self.client_id} 166 | data = json.dumps(p).encode("utf-8") 167 | req = urllib.request.Request( 168 | f"http://{self.server_address}/prompt?{self.client_id}", data=data 169 | ) 170 | 171 | output = json.loads(urllib.request.urlopen(req).read()) 172 | return output["prompt_id"] 173 | except urllib.error.HTTPError as e: 174 | print(f"ComfyUI error: {e.code} {e.reason}") 175 | http_error = True 176 | 177 | if http_error: 178 | raise Exception( 179 | "ComfyUI Error – Your workflow could not be run. This usually happens if you’re trying to use an unsupported node. Check the logs for 'KeyError: ' details, and go to https://github.com/fofr/cog-comfyui to see the list of supported custom nodes." 
180 | ) 181 | 182 | def wait_for_prompt_completion(self, workflow, prompt_id): 183 | while True: 184 | out = self.ws.recv() 185 | if isinstance(out, str): 186 | message = json.loads(out) 187 | if message["type"] == "executing": 188 | data = message["data"] 189 | if data["node"] is None and data["prompt_id"] == prompt_id: 190 | break 191 | elif data["prompt_id"] == prompt_id: 192 | node = workflow.get(data["node"], {}) 193 | meta = node.get("_meta", {}) 194 | class_type = node.get("class_type", "Unknown") 195 | print( 196 | f"Executing node {data['node']}, title: {meta.get('title', 'Unknown')}, class type: {class_type}" 197 | ) 198 | else: 199 | continue 200 | 201 | def load_workflow(self, workflow): 202 | if not isinstance(workflow, dict): 203 | wf = json.loads(workflow) 204 | else: 205 | wf = workflow 206 | 207 | # There are two types of ComfyUI JSON 208 | # We need the API version 209 | if any(key in wf.keys() for key in ["last_node_id", "last_link_id", "version"]): 210 | raise ValueError( 211 | "You need to use the API JSON version of a ComfyUI workflow. To do this go to your ComfyUI settings and turn on 'Enable Dev mode Options'. Then you can save your ComfyUI workflow via the 'Save (API Format)' button." 212 | ) 213 | 214 | self.handle_inputs(wf) 215 | self.handle_weights(wf) 216 | return wf 217 | 218 | def randomise_input_seed(self, input_key, inputs): 219 | if input_key in inputs and isinstance(inputs[input_key], (int, float)): 220 | new_seed = random.randint(0, 2**32 - 1) 221 | print(f"Randomising {input_key} to {new_seed}") 222 | inputs[input_key] = new_seed 223 | 224 | def randomise_seeds(self, workflow): 225 | for node_id, node in workflow.items(): 226 | inputs = node.get("inputs", {}) 227 | seed_keys = ["seed", "noise_seed", "rand_seed"] 228 | for seed_key in seed_keys: 229 | self.randomise_input_seed(seed_key, inputs) 230 | 231 | def run_workflow(self, workflow): 232 | print("Running workflow") 233 | prompt_id = self.queue_prompt(workflow) 234 | self.wait_for_prompt_completion(workflow, prompt_id) 235 | output_json = self.get_history(prompt_id) 236 | print("outputs: ", output_json) 237 | print("====================================") 238 | 239 | def get_history(self, prompt_id): 240 | with urllib.request.urlopen( 241 | f"http://{self.server_address}/history/{prompt_id}" 242 | ) as response: 243 | output = json.loads(response.read()) 244 | return output[prompt_id]["outputs"] 245 | -------------------------------------------------------------------------------- /material_transfer_api.json: -------------------------------------------------------------------------------- 1 | { 2 | "4": { 3 | "inputs": { 4 | "ckpt_name": "juggernautXL_juggernautX.safetensors" 5 | }, 6 | "class_type": "CheckpointLoaderSimple", 7 | "_meta": { 8 | "title": "Load Checkpoint" 9 | } 10 | }, 11 | "6": { 12 | "inputs": { 13 | "text": "sculpture", 14 | "clip": [ 15 | "4", 16 | 1 17 | ] 18 | }, 19 | "class_type": "CLIPTextEncode", 20 | "_meta": { 21 | "title": "CLIP Text Encode (Prompt)" 22 | } 23 | }, 24 | "7": { 25 | "inputs": { 26 | "text": "", 27 | "clip": [ 28 | "4", 29 | 1 30 | ] 31 | }, 32 | "class_type": "CLIPTextEncode", 33 | "_meta": { 34 | "title": "CLIP Text Encode (Prompt)" 35 | } 36 | }, 37 | "9": { 38 | "inputs": { 39 | "filename_prefix": "ComfyUI", 40 | "images": [ 41 | "10", 42 | 5 43 | ] 44 | }, 45 | "class_type": "SaveImage", 46 | "_meta": { 47 | "title": "Save Image" 48 | } 49 | }, 50 | "10": { 51 | "inputs": { 52 | "seed": 182541674078463, 53 | "steps": 6, 54 | "cfg": 2, 55 | 
"sampler_name": "dpmpp_sde", 56 | "scheduler": "karras", 57 | "denoise": 1, 58 | "preview_method": "auto", 59 | "vae_decode": "true", 60 | "model": [ 61 | "49", 62 | 0 63 | ], 64 | "positive": [ 65 | "52", 66 | 0 67 | ], 68 | "negative": [ 69 | "52", 70 | 1 71 | ], 72 | "latent_image": [ 73 | "41", 74 | 2 75 | ], 76 | "optional_vae": [ 77 | "4", 78 | 2 79 | ] 80 | }, 81 | "class_type": "KSampler (Efficient)", 82 | "_meta": { 83 | "title": "KSampler (Efficient)" 84 | } 85 | }, 86 | "11": { 87 | "inputs": { 88 | "image": "subject.png", 89 | "upload": "image" 90 | }, 91 | "class_type": "LoadImage", 92 | "_meta": { 93 | "title": "Load Image" 94 | } 95 | }, 96 | "12": { 97 | "inputs": { 98 | "rembg_session": [ 99 | "13", 100 | 0 101 | ], 102 | "image": [ 103 | "60", 104 | 0 105 | ] 106 | }, 107 | "class_type": "ImageRemoveBackground+", 108 | "_meta": { 109 | "title": "🔧 Image Remove Background" 110 | } 111 | }, 112 | "13": { 113 | "inputs": { 114 | "model": "u2net: general purpose", 115 | "providers": "CUDA" 116 | }, 117 | "class_type": "RemBGSession+", 118 | "_meta": { 119 | "title": "🔧 RemBG Session" 120 | } 121 | }, 122 | "36": { 123 | "inputs": { 124 | "brighter": 1, 125 | "target_image": [ 126 | "60", 127 | 0 128 | ], 129 | "subject_mask": [ 130 | "37", 131 | 0 132 | ] 133 | }, 134 | "class_type": "ZeST: Grayout Subject", 135 | "_meta": { 136 | "title": "ZeST: Grayout Subject" 137 | } 138 | }, 139 | "37": { 140 | "inputs": { 141 | "amount": 6, 142 | "mask": [ 143 | "12", 144 | 1 145 | ] 146 | }, 147 | "class_type": "MaskBlur+", 148 | "_meta": { 149 | "title": "🔧 Mask Blur" 150 | } 151 | }, 152 | "39": { 153 | "inputs": { 154 | "mask": [ 155 | "37", 156 | 0 157 | ] 158 | }, 159 | "class_type": "MaskPreview+", 160 | "_meta": { 161 | "title": "🔧 Mask Preview" 162 | } 163 | }, 164 | "40": { 165 | "inputs": { 166 | "images": [ 167 | "36", 168 | 0 169 | ] 170 | }, 171 | "class_type": "PreviewImage", 172 | "_meta": { 173 | "title": "Preview Image" 174 | } 175 | }, 176 | "41": { 177 | "inputs": { 178 | "positive": [ 179 | "6", 180 | 0 181 | ], 182 | "negative": [ 183 | "7", 184 | 0 185 | ], 186 | "vae": [ 187 | "4", 188 | 2 189 | ], 190 | "pixels": [ 191 | "36", 192 | 0 193 | ], 194 | "mask": [ 195 | "37", 196 | 0 197 | ] 198 | }, 199 | "class_type": "InpaintModelConditioning", 200 | "_meta": { 201 | "title": "InpaintModelConditioning" 202 | } 203 | }, 204 | "44": { 205 | "inputs": { 206 | "preset": "STANDARD (medium strength)", 207 | "model": [ 208 | "4", 209 | 0 210 | ] 211 | }, 212 | "class_type": "IPAdapterUnifiedLoader", 213 | "_meta": { 214 | "title": "IPAdapter Unified Loader" 215 | } 216 | }, 217 | "45": { 218 | "inputs": { 219 | "weight": 1, 220 | "weight_type": "style transfer", 221 | "combine_embeds": "concat", 222 | "start_at": 0, 223 | "end_at": 1, 224 | "embeds_scaling": "V only", 225 | "model": [ 226 | "44", 227 | 0 228 | ], 229 | "ipadapter": [ 230 | "44", 231 | 1 232 | ], 233 | "image": [ 234 | "47", 235 | 0 236 | ] 237 | }, 238 | "class_type": "IPAdapterAdvanced", 239 | "_meta": { 240 | "title": "IPAdapter Advanced" 241 | } 242 | }, 243 | "46": { 244 | "inputs": { 245 | "image": "material.png", 246 | "upload": "image" 247 | }, 248 | "class_type": "LoadImage", 249 | "_meta": { 250 | "title": "Load Image" 251 | } 252 | }, 253 | "47": { 254 | "inputs": { 255 | "width": 1704, 256 | "height": 1704, 257 | "position": "center", 258 | "x_offset": 0, 259 | "y_offset": 0, 260 | "image": [ 261 | "46", 262 | 0 263 | ] 264 | }, 265 | "class_type": "ImageCrop+", 266 | "_meta": { 267 | "title": 
"🔧 Image Crop" 268 | } 269 | }, 270 | "48": { 271 | "inputs": { 272 | "model": [ 273 | "45", 274 | 0 275 | ] 276 | }, 277 | "class_type": "DifferentialDiffusion", 278 | "_meta": { 279 | "title": "Differential Diffusion" 280 | } 281 | }, 282 | "49": { 283 | "inputs": { 284 | "multiplier": 0.9, 285 | "model": [ 286 | "48", 287 | 0 288 | ] 289 | }, 290 | "class_type": "RescaleCFG", 291 | "_meta": { 292 | "title": "RescaleCFG" 293 | } 294 | }, 295 | "50": { 296 | "inputs": { 297 | "strength": 0.9, 298 | "start_percent": 0, 299 | "end_percent": 1, 300 | "positive": [ 301 | "41", 302 | 0 303 | ], 304 | "negative": [ 305 | "41", 306 | 1 307 | ], 308 | "control_net": [ 309 | "51", 310 | 0 311 | ], 312 | "image": [ 313 | "54", 314 | 0 315 | ] 316 | }, 317 | "class_type": "ControlNetApplyAdvanced", 318 | "_meta": { 319 | "title": "Apply ControlNet (Advanced)" 320 | } 321 | }, 322 | "51": { 323 | "inputs": { 324 | "control_net_name": "t2i-adapter_diffusers_xl_depth_midas.safetensors" 325 | }, 326 | "class_type": "ControlNetLoaderAdvanced", 327 | "_meta": { 328 | "title": "Load Advanced ControlNet Model 🛂🅐🅒🅝" 329 | } 330 | }, 331 | "52": { 332 | "inputs": { 333 | "strength": 0.35000000000000003, 334 | "start_percent": 0, 335 | "end_percent": 0.6, 336 | "positive": [ 337 | "50", 338 | 0 339 | ], 340 | "negative": [ 341 | "50", 342 | 1 343 | ], 344 | "control_net": [ 345 | "53", 346 | 0 347 | ], 348 | "image": [ 349 | "55", 350 | 0 351 | ] 352 | }, 353 | "class_type": "ControlNetApplyAdvanced", 354 | "_meta": { 355 | "title": "Apply ControlNet (Advanced)" 356 | } 357 | }, 358 | "53": { 359 | "inputs": { 360 | "control_net_name": "t2i-adapter_diffusers_xl_lineart.safetensors" 361 | }, 362 | "class_type": "ControlNetLoaderAdvanced", 363 | "_meta": { 364 | "title": "Load Advanced ControlNet Model 🛂🅐🅒🅝" 365 | } 366 | }, 367 | "54": { 368 | "inputs": { 369 | "ckpt_name": "depth_anything_vitl14.pth", 370 | "resolution": 1024, 371 | "image": [ 372 | "60", 373 | 0 374 | ] 375 | }, 376 | "class_type": "DepthAnythingPreprocessor", 377 | "_meta": { 378 | "title": "Depth Anything" 379 | } 380 | }, 381 | "55": { 382 | "inputs": { 383 | "coarse": "disable", 384 | "resolution": 512, 385 | "image": [ 386 | "60", 387 | 0 388 | ] 389 | }, 390 | "class_type": "LineArtPreprocessor", 391 | "_meta": { 392 | "title": "Realistic Lineart" 393 | } 394 | }, 395 | "56": { 396 | "inputs": { 397 | "images": [ 398 | "54", 399 | 0 400 | ] 401 | }, 402 | "class_type": "PreviewImage", 403 | "_meta": { 404 | "title": "Preview Image" 405 | } 406 | }, 407 | "57": { 408 | "inputs": { 409 | "images": [ 410 | "55", 411 | 0 412 | ] 413 | }, 414 | "class_type": "PreviewImage", 415 | "_meta": { 416 | "title": "Preview Image" 417 | } 418 | }, 419 | "58": { 420 | "inputs": { 421 | "images": [ 422 | "47", 423 | 0 424 | ] 425 | }, 426 | "class_type": "PreviewImage", 427 | "_meta": { 428 | "title": "Preview Image" 429 | } 430 | }, 431 | "60": { 432 | "inputs": { 433 | "width": 1920, 434 | "height": 1920, 435 | "interpolation": "nearest", 436 | "keep_proportion": true, 437 | "condition": "downscale if bigger", 438 | "multiple_of": 0, 439 | "image": [ 440 | "11", 441 | 0 442 | ] 443 | }, 444 | "class_type": "ImageResize+", 445 | "_meta": { 446 | "title": "🔧 Image Resize" 447 | } 448 | } 449 | } 450 | -------------------------------------------------------------------------------- /material_transfer_ui.json: -------------------------------------------------------------------------------- 1 | { 2 | "last_node_id": 60, 3 | "last_link_id": 116, 4 | 
"nodes": [ 5 | { 6 | "id": 48, 7 | "type": "DifferentialDiffusion", 8 | "pos": [ 9 | 2281, 10 | -77 11 | ], 12 | "size": { 13 | "0": 210, 14 | "1": 26 15 | }, 16 | "flags": {}, 17 | "order": 16, 18 | "mode": 0, 19 | "inputs": [ 20 | { 21 | "name": "model", 22 | "type": "MODEL", 23 | "link": 92 24 | } 25 | ], 26 | "outputs": [ 27 | { 28 | "name": "MODEL", 29 | "type": "MODEL", 30 | "links": [ 31 | 91 32 | ], 33 | "shape": 3, 34 | "slot_index": 0 35 | } 36 | ], 37 | "properties": { 38 | "Node name for S&R": "DifferentialDiffusion" 39 | } 40 | }, 41 | { 42 | "id": 41, 43 | "type": "InpaintModelConditioning", 44 | "pos": [ 45 | 1435, 46 | 311 47 | ], 48 | "size": { 49 | "0": 216.59999084472656, 50 | "1": 106 51 | }, 52 | "flags": {}, 53 | "order": 25, 54 | "mode": 0, 55 | "inputs": [ 56 | { 57 | "name": "positive", 58 | "type": "CONDITIONING", 59 | "link": 76 60 | }, 61 | { 62 | "name": "negative", 63 | "type": "CONDITIONING", 64 | "link": 77 65 | }, 66 | { 67 | "name": "vae", 68 | "type": "VAE", 69 | "link": 81 70 | }, 71 | { 72 | "name": "pixels", 73 | "type": "IMAGE", 74 | "link": 82 75 | }, 76 | { 77 | "name": "mask", 78 | "type": "MASK", 79 | "link": 83 80 | } 81 | ], 82 | "outputs": [ 83 | { 84 | "name": "positive", 85 | "type": "CONDITIONING", 86 | "links": [ 87 | 98 88 | ], 89 | "shape": 3, 90 | "slot_index": 0 91 | }, 92 | { 93 | "name": "negative", 94 | "type": "CONDITIONING", 95 | "links": [ 96 | 99 97 | ], 98 | "shape": 3, 99 | "slot_index": 1 100 | }, 101 | { 102 | "name": "latent", 103 | "type": "LATENT", 104 | "links": [ 105 | 80 106 | ], 107 | "shape": 3, 108 | "slot_index": 2 109 | } 110 | ], 111 | "properties": { 112 | "Node name for S&R": "InpaintModelConditioning" 113 | } 114 | }, 115 | { 116 | "id": 56, 117 | "type": "PreviewImage", 118 | "pos": [ 119 | 1855.801586521185, 120 | 950.5616556745784 121 | ], 122 | "size": { 123 | "0": 210, 124 | "1": 246 125 | }, 126 | "flags": {}, 127 | "order": 18, 128 | "mode": 0, 129 | "inputs": [ 130 | { 131 | "name": "images", 132 | "type": "IMAGE", 133 | "link": 106 134 | } 135 | ], 136 | "properties": { 137 | "Node name for S&R": "PreviewImage" 138 | } 139 | }, 140 | { 141 | "id": 55, 142 | "type": "LineArtPreprocessor", 143 | "pos": [ 144 | 2238.8015865211846, 145 | 477.56165567457896 146 | ], 147 | "size": { 148 | "0": 315, 149 | "1": 82 150 | }, 151 | "flags": {}, 152 | "order": 15, 153 | "mode": 0, 154 | "inputs": [ 155 | { 156 | "name": "image", 157 | "type": "IMAGE", 158 | "link": 116 159 | } 160 | ], 161 | "outputs": [ 162 | { 163 | "name": "IMAGE", 164 | "type": "IMAGE", 165 | "links": [ 166 | 104, 167 | 107 168 | ], 169 | "shape": 3, 170 | "slot_index": 0 171 | } 172 | ], 173 | "properties": { 174 | "Node name for S&R": "LineArtPreprocessor" 175 | }, 176 | "widgets_values": [ 177 | "disable", 178 | 512 179 | ] 180 | }, 181 | { 182 | "id": 57, 183 | "type": "PreviewImage", 184 | "pos": [ 185 | 2312.8015865211846, 186 | 953.5616556745784 187 | ], 188 | "size": { 189 | "0": 210, 190 | "1": 246 191 | }, 192 | "flags": {}, 193 | "order": 19, 194 | "mode": 0, 195 | "inputs": [ 196 | { 197 | "name": "images", 198 | "type": "IMAGE", 199 | "link": 107 200 | } 201 | ], 202 | "properties": { 203 | "Node name for S&R": "PreviewImage" 204 | } 205 | }, 206 | { 207 | "id": 52, 208 | "type": "ControlNetApplyAdvanced", 209 | "pos": [ 210 | 2232.8015865211846, 211 | 605.5616556745784 212 | ], 213 | "size": { 214 | "0": 315, 215 | "1": 166 216 | }, 217 | "flags": {}, 218 | "order": 27, 219 | "mode": 0, 220 | "inputs": [ 221 | { 222 | "name": 
"positive", 223 | "type": "CONDITIONING", 224 | "link": 96 225 | }, 226 | { 227 | "name": "negative", 228 | "type": "CONDITIONING", 229 | "link": 97 230 | }, 231 | { 232 | "name": "control_net", 233 | "type": "CONTROL_NET", 234 | "link": 95, 235 | "slot_index": 2 236 | }, 237 | { 238 | "name": "image", 239 | "type": "IMAGE", 240 | "link": 104, 241 | "slot_index": 3 242 | } 243 | ], 244 | "outputs": [ 245 | { 246 | "name": "positive", 247 | "type": "CONDITIONING", 248 | "links": [ 249 | 100 250 | ], 251 | "shape": 3, 252 | "slot_index": 0 253 | }, 254 | { 255 | "name": "negative", 256 | "type": "CONDITIONING", 257 | "links": [ 258 | 101 259 | ], 260 | "shape": 3, 261 | "slot_index": 1 262 | } 263 | ], 264 | "properties": { 265 | "Node name for S&R": "ControlNetApplyAdvanced" 266 | }, 267 | "widgets_values": [ 268 | 0.35000000000000003, 269 | 0, 270 | 0.6 271 | ] 272 | }, 273 | { 274 | "id": 50, 275 | "type": "ControlNetApplyAdvanced", 276 | "pos": [ 277 | 1812.801586521185, 278 | 615.5616556745784 279 | ], 280 | "size": { 281 | "0": 315, 282 | "1": 166 283 | }, 284 | "flags": {}, 285 | "order": 26, 286 | "mode": 0, 287 | "inputs": [ 288 | { 289 | "name": "positive", 290 | "type": "CONDITIONING", 291 | "link": 98 292 | }, 293 | { 294 | "name": "negative", 295 | "type": "CONDITIONING", 296 | "link": 99 297 | }, 298 | { 299 | "name": "control_net", 300 | "type": "CONTROL_NET", 301 | "link": 94, 302 | "slot_index": 2 303 | }, 304 | { 305 | "name": "image", 306 | "type": "IMAGE", 307 | "link": 102, 308 | "slot_index": 3 309 | } 310 | ], 311 | "outputs": [ 312 | { 313 | "name": "positive", 314 | "type": "CONDITIONING", 315 | "links": [ 316 | 96 317 | ], 318 | "shape": 3, 319 | "slot_index": 0 320 | }, 321 | { 322 | "name": "negative", 323 | "type": "CONDITIONING", 324 | "links": [ 325 | 97 326 | ], 327 | "shape": 3, 328 | "slot_index": 1 329 | } 330 | ], 331 | "properties": { 332 | "Node name for S&R": "ControlNetApplyAdvanced" 333 | }, 334 | "widgets_values": [ 335 | 0.9, 336 | 0, 337 | 1 338 | ] 339 | }, 340 | { 341 | "id": 47, 342 | "type": "ImageCrop+", 343 | "pos": [ 344 | 908, 345 | -505 346 | ], 347 | "size": { 348 | "0": 315, 349 | "1": 194 350 | }, 351 | "flags": {}, 352 | "order": 9, 353 | "mode": 0, 354 | "inputs": [ 355 | { 356 | "name": "image", 357 | "type": "IMAGE", 358 | "link": 89 359 | } 360 | ], 361 | "outputs": [ 362 | { 363 | "name": "IMAGE", 364 | "type": "IMAGE", 365 | "links": [ 366 | 90, 367 | 108 368 | ], 369 | "shape": 3, 370 | "slot_index": 0 371 | }, 372 | { 373 | "name": "x", 374 | "type": "INT", 375 | "links": null, 376 | "shape": 3 377 | }, 378 | { 379 | "name": "y", 380 | "type": "INT", 381 | "links": null, 382 | "shape": 3 383 | } 384 | ], 385 | "properties": { 386 | "Node name for S&R": "ImageCrop+" 387 | }, 388 | "widgets_values": [ 389 | 1704, 390 | 1704, 391 | "center", 392 | 0, 393 | 0 394 | ] 395 | }, 396 | { 397 | "id": 58, 398 | "type": "PreviewImage", 399 | "pos": [ 400 | 1425.6055912475588, 401 | -500.04130996704095 402 | ], 403 | "size": { 404 | "0": 210, 405 | "1": 246 406 | }, 407 | "flags": {}, 408 | "order": 12, 409 | "mode": 0, 410 | "inputs": [ 411 | { 412 | "name": "images", 413 | "type": "IMAGE", 414 | "link": 108 415 | } 416 | ], 417 | "properties": { 418 | "Node name for S&R": "PreviewImage" 419 | } 420 | }, 421 | { 422 | "id": 49, 423 | "type": "RescaleCFG", 424 | "pos": [ 425 | 2180, 426 | 0 427 | ], 428 | "size": { 429 | "0": 315, 430 | "1": 58 431 | }, 432 | "flags": {}, 433 | "order": 20, 434 | "mode": 0, 435 | "inputs": [ 436 | { 437 | 
"name": "model", 438 | "type": "MODEL", 439 | "link": 91 440 | } 441 | ], 442 | "outputs": [ 443 | { 444 | "name": "MODEL", 445 | "type": "MODEL", 446 | "links": [ 447 | 93 448 | ], 449 | "shape": 3, 450 | "slot_index": 0 451 | } 452 | ], 453 | "properties": { 454 | "Node name for S&R": "RescaleCFG" 455 | }, 456 | "widgets_values": [ 457 | 0.9 458 | ] 459 | }, 460 | { 461 | "id": 45, 462 | "type": "IPAdapterAdvanced", 463 | "pos": [ 464 | 1353, 465 | -146 466 | ], 467 | "size": { 468 | "0": 315, 469 | "1": 278 470 | }, 471 | "flags": {}, 472 | "order": 11, 473 | "mode": 0, 474 | "inputs": [ 475 | { 476 | "name": "model", 477 | "type": "MODEL", 478 | "link": 110 479 | }, 480 | { 481 | "name": "ipadapter", 482 | "type": "IPADAPTER", 483 | "link": 111 484 | }, 485 | { 486 | "name": "image", 487 | "type": "IMAGE", 488 | "link": 90, 489 | "slot_index": 2 490 | }, 491 | { 492 | "name": "image_negative", 493 | "type": "IMAGE", 494 | "link": null 495 | }, 496 | { 497 | "name": "attn_mask", 498 | "type": "MASK", 499 | "link": null 500 | }, 501 | { 502 | "name": "clip_vision", 503 | "type": "CLIP_VISION", 504 | "link": null 505 | } 506 | ], 507 | "outputs": [ 508 | { 509 | "name": "MODEL", 510 | "type": "MODEL", 511 | "links": [ 512 | 92 513 | ], 514 | "shape": 3, 515 | "slot_index": 0 516 | } 517 | ], 518 | "properties": { 519 | "Node name for S&R": "IPAdapterAdvanced" 520 | }, 521 | "widgets_values": [ 522 | 1, 523 | "style transfer", 524 | "concat", 525 | 0, 526 | 1, 527 | "V only" 528 | ] 529 | }, 530 | { 531 | "id": 44, 532 | "type": "IPAdapterUnifiedLoader", 533 | "pos": [ 534 | 571, 535 | -143 536 | ], 537 | "size": { 538 | "0": 315, 539 | "1": 78 540 | }, 541 | "flags": {}, 542 | "order": 6, 543 | "mode": 0, 544 | "inputs": [ 545 | { 546 | "name": "model", 547 | "type": "MODEL", 548 | "link": 86 549 | }, 550 | { 551 | "name": "ipadapter", 552 | "type": "IPADAPTER", 553 | "link": null 554 | } 555 | ], 556 | "outputs": [ 557 | { 558 | "name": "model", 559 | "type": "MODEL", 560 | "links": [ 561 | 110 562 | ], 563 | "shape": 3, 564 | "slot_index": 0 565 | }, 566 | { 567 | "name": "ipadapter", 568 | "type": "IPADAPTER", 569 | "links": [ 570 | 111 571 | ], 572 | "shape": 3, 573 | "slot_index": 1 574 | } 575 | ], 576 | "properties": { 577 | "Node name for S&R": "IPAdapterUnifiedLoader" 578 | }, 579 | "widgets_values": [ 580 | "STANDARD (medium strength)" 581 | ] 582 | }, 583 | { 584 | "id": 39, 585 | "type": "MaskPreview+", 586 | "pos": [ 587 | 1193, 588 | 947 589 | ], 590 | "size": { 591 | "0": 210, 592 | "1": 246 593 | }, 594 | "flags": {}, 595 | "order": 23, 596 | "mode": 0, 597 | "inputs": [ 598 | { 599 | "name": "mask", 600 | "type": "MASK", 601 | "link": 74 602 | } 603 | ], 604 | "properties": { 605 | "Node name for S&R": "MaskPreview+" 606 | } 607 | }, 608 | { 609 | "id": 40, 610 | "type": "PreviewImage", 611 | "pos": [ 612 | 1443, 613 | 974 614 | ], 615 | "size": { 616 | "0": 210, 617 | "1": 246 618 | }, 619 | "flags": {}, 620 | "order": 24, 621 | "mode": 0, 622 | "inputs": [ 623 | { 624 | "name": "images", 625 | "type": "IMAGE", 626 | "link": 75 627 | } 628 | ], 629 | "properties": { 630 | "Node name for S&R": "PreviewImage" 631 | } 632 | }, 633 | { 634 | "id": 36, 635 | "type": "ZeST: Grayout Subject", 636 | "pos": [ 637 | 1191, 638 | 738 639 | ], 640 | "size": { 641 | "0": 315, 642 | "1": 78 643 | }, 644 | "flags": {}, 645 | "order": 22, 646 | "mode": 0, 647 | "inputs": [ 648 | { 649 | "name": "target_image", 650 | "type": "IMAGE", 651 | "link": 114 652 | }, 653 | { 654 | "name": 
"subject_mask", 655 | "type": "MASK", 656 | "link": 73 657 | } 658 | ], 659 | "outputs": [ 660 | { 661 | "name": "IMAGE", 662 | "type": "IMAGE", 663 | "links": [ 664 | 75, 665 | 82 666 | ], 667 | "shape": 3, 668 | "slot_index": 0 669 | } 670 | ], 671 | "properties": { 672 | "Node name for S&R": "ZeST: Grayout Subject" 673 | }, 674 | "widgets_values": [ 675 | 1 676 | ] 677 | }, 678 | { 679 | "id": 37, 680 | "type": "MaskBlur+", 681 | "pos": [ 682 | 846, 683 | 1032 684 | ], 685 | "size": { 686 | "0": 315, 687 | "1": 58 688 | }, 689 | "flags": {}, 690 | "order": 21, 691 | "mode": 0, 692 | "inputs": [ 693 | { 694 | "name": "mask", 695 | "type": "MASK", 696 | "link": 72 697 | } 698 | ], 699 | "outputs": [ 700 | { 701 | "name": "MASK", 702 | "type": "MASK", 703 | "links": [ 704 | 73, 705 | 74, 706 | 83 707 | ], 708 | "shape": 3, 709 | "slot_index": 0 710 | } 711 | ], 712 | "properties": { 713 | "Node name for S&R": "MaskBlur+" 714 | }, 715 | "widgets_values": [ 716 | 6 717 | ] 718 | }, 719 | { 720 | "id": 38, 721 | "type": "GrowMask", 722 | "pos": [ 723 | 856, 724 | 897 725 | ], 726 | "size": { 727 | "0": 315, 728 | "1": 82 729 | }, 730 | "flags": {}, 731 | "order": 17, 732 | "mode": 4, 733 | "inputs": [ 734 | { 735 | "name": "mask", 736 | "type": "MASK", 737 | "link": 71 738 | } 739 | ], 740 | "outputs": [ 741 | { 742 | "name": "MASK", 743 | "type": "MASK", 744 | "links": [ 745 | 72 746 | ], 747 | "shape": 3, 748 | "slot_index": 0 749 | } 750 | ], 751 | "properties": { 752 | "Node name for S&R": "GrowMask" 753 | }, 754 | "widgets_values": [ 755 | 0, 756 | true 757 | ] 758 | }, 759 | { 760 | "id": 12, 761 | "type": "ImageRemoveBackground+", 762 | "pos": [ 763 | 708, 764 | 795 765 | ], 766 | "size": { 767 | "0": 241.79998779296875, 768 | "1": 46 769 | }, 770 | "flags": {}, 771 | "order": 13, 772 | "mode": 0, 773 | "inputs": [ 774 | { 775 | "name": "rembg_session", 776 | "type": "REMBG_SESSION", 777 | "link": 15, 778 | "slot_index": 0 779 | }, 780 | { 781 | "name": "image", 782 | "type": "IMAGE", 783 | "link": 113 784 | } 785 | ], 786 | "outputs": [ 787 | { 788 | "name": "IMAGE", 789 | "type": "IMAGE", 790 | "links": [], 791 | "shape": 3, 792 | "slot_index": 0 793 | }, 794 | { 795 | "name": "MASK", 796 | "type": "MASK", 797 | "links": [ 798 | 71 799 | ], 800 | "shape": 3, 801 | "slot_index": 1 802 | } 803 | ], 804 | "properties": { 805 | "Node name for S&R": "ImageRemoveBackground+" 806 | } 807 | }, 808 | { 809 | "id": 13, 810 | "type": "RemBGSession+", 811 | "pos": [ 812 | 489, 813 | 923 814 | ], 815 | "size": { 816 | "0": 315, 817 | "1": 82 818 | }, 819 | "flags": {}, 820 | "order": 0, 821 | "mode": 0, 822 | "outputs": [ 823 | { 824 | "name": "REMBG_SESSION", 825 | "type": "REMBG_SESSION", 826 | "links": [ 827 | 15 828 | ], 829 | "shape": 3 830 | } 831 | ], 832 | "properties": { 833 | "Node name for S&R": "RemBGSession+" 834 | }, 835 | "widgets_values": [ 836 | "u2net: general purpose", 837 | "CUDA" 838 | ] 839 | }, 840 | { 841 | "id": 54, 842 | "type": "DepthAnythingPreprocessor", 843 | "pos": [ 844 | 1813.801586521185, 845 | 481.56165567457896 846 | ], 847 | "size": { 848 | "0": 315, 849 | "1": 82 850 | }, 851 | "flags": {}, 852 | "order": 14, 853 | "mode": 0, 854 | "inputs": [ 855 | { 856 | "name": "image", 857 | "type": "IMAGE", 858 | "link": 115 859 | } 860 | ], 861 | "outputs": [ 862 | { 863 | "name": "IMAGE", 864 | "type": "IMAGE", 865 | "links": [ 866 | 102, 867 | 106 868 | ], 869 | "shape": 3, 870 | "slot_index": 0 871 | } 872 | ], 873 | "properties": { 874 | "Node name for S&R": 
"DepthAnythingPreprocessor" 875 | }, 876 | "widgets_values": [ 877 | "depth_anything_vitl14.pth", 878 | 1024 879 | ] 880 | }, 881 | { 882 | "id": 4, 883 | "type": "CheckpointLoaderSimple", 884 | "pos": [ 885 | 26, 886 | 474 887 | ], 888 | "size": { 889 | "0": 315, 890 | "1": 98 891 | }, 892 | "flags": {}, 893 | "order": 1, 894 | "mode": 0, 895 | "outputs": [ 896 | { 897 | "name": "MODEL", 898 | "type": "MODEL", 899 | "links": [ 900 | 86 901 | ], 902 | "slot_index": 0 903 | }, 904 | { 905 | "name": "CLIP", 906 | "type": "CLIP", 907 | "links": [ 908 | 3, 909 | 5 910 | ], 911 | "slot_index": 1 912 | }, 913 | { 914 | "name": "VAE", 915 | "type": "VAE", 916 | "links": [ 917 | 32, 918 | 81 919 | ], 920 | "slot_index": 2 921 | } 922 | ], 923 | "properties": { 924 | "Node name for S&R": "CheckpointLoaderSimple" 925 | }, 926 | "widgets_values": [ 927 | "juggernautXL_juggernautX.safetensors" 928 | ] 929 | }, 930 | { 931 | "id": 51, 932 | "type": "ControlNetLoaderAdvanced", 933 | "pos": [ 934 | 1812.801586521185, 935 | 835.5616556745784 936 | ], 937 | "size": { 938 | "0": 367.79998779296875, 939 | "1": 58 940 | }, 941 | "flags": {}, 942 | "order": 2, 943 | "mode": 0, 944 | "inputs": [ 945 | { 946 | "name": "timestep_keyframe", 947 | "type": "TIMESTEP_KEYFRAME", 948 | "link": null 949 | } 950 | ], 951 | "outputs": [ 952 | { 953 | "name": "CONTROL_NET", 954 | "type": "CONTROL_NET", 955 | "links": [ 956 | 94 957 | ], 958 | "shape": 3 959 | } 960 | ], 961 | "properties": { 962 | "Node name for S&R": "ControlNetLoaderAdvanced" 963 | }, 964 | "widgets_values": [ 965 | "t2i-adapter_diffusers_xl_depth_midas.safetensors" 966 | ] 967 | }, 968 | { 969 | "id": 53, 970 | "type": "ControlNetLoaderAdvanced", 971 | "pos": [ 972 | 2232.8015865211846, 973 | 825.5616556745784 974 | ], 975 | "size": { 976 | "0": 367.79998779296875, 977 | "1": 58 978 | }, 979 | "flags": {}, 980 | "order": 3, 981 | "mode": 0, 982 | "inputs": [ 983 | { 984 | "name": "timestep_keyframe", 985 | "type": "TIMESTEP_KEYFRAME", 986 | "link": null 987 | } 988 | ], 989 | "outputs": [ 990 | { 991 | "name": "CONTROL_NET", 992 | "type": "CONTROL_NET", 993 | "links": [ 994 | 95 995 | ], 996 | "shape": 3 997 | } 998 | ], 999 | "properties": { 1000 | "Node name for S&R": "ControlNetLoaderAdvanced" 1001 | }, 1002 | "widgets_values": [ 1003 | "t2i-adapter_diffusers_xl_lineart.safetensors" 1004 | ] 1005 | }, 1006 | { 1007 | "id": 7, 1008 | "type": "CLIPTextEncode", 1009 | "pos": [ 1010 | 413, 1011 | 389 1012 | ], 1013 | "size": { 1014 | "0": 425.27801513671875, 1015 | "1": 180.6060791015625 1016 | }, 1017 | "flags": {}, 1018 | "order": 8, 1019 | "mode": 0, 1020 | "inputs": [ 1021 | { 1022 | "name": "clip", 1023 | "type": "CLIP", 1024 | "link": 5 1025 | } 1026 | ], 1027 | "outputs": [ 1028 | { 1029 | "name": "CONDITIONING", 1030 | "type": "CONDITIONING", 1031 | "links": [ 1032 | 77 1033 | ], 1034 | "slot_index": 0 1035 | } 1036 | ], 1037 | "properties": { 1038 | "Node name for S&R": "CLIPTextEncode" 1039 | }, 1040 | "widgets_values": [ 1041 | "" 1042 | ] 1043 | }, 1044 | { 1045 | "id": 46, 1046 | "type": "LoadImage", 1047 | "pos": [ 1048 | 538, 1049 | -558 1050 | ], 1051 | "size": { 1052 | "0": 315, 1053 | "1": 314 1054 | }, 1055 | "flags": {}, 1056 | "order": 4, 1057 | "mode": 0, 1058 | "outputs": [ 1059 | { 1060 | "name": "IMAGE", 1061 | "type": "IMAGE", 1062 | "links": [ 1063 | 89 1064 | ], 1065 | "shape": 3, 1066 | "slot_index": 0 1067 | }, 1068 | { 1069 | "name": "MASK", 1070 | "type": "MASK", 1071 | "links": null, 1072 | "shape": 3 1073 | } 1074 | ], 
1075 | "properties": { 1076 | "Node name for S&R": "LoadImage" 1077 | }, 1078 | "widgets_values": [ 1079 | "material.png", 1080 | "image" 1081 | ] 1082 | }, 1083 | { 1084 | "id": 6, 1085 | "type": "CLIPTextEncode", 1086 | "pos": [ 1087 | 415, 1088 | 186 1089 | ], 1090 | "size": { 1091 | "0": 422.84503173828125, 1092 | "1": 164.31304931640625 1093 | }, 1094 | "flags": {}, 1095 | "order": 7, 1096 | "mode": 0, 1097 | "inputs": [ 1098 | { 1099 | "name": "clip", 1100 | "type": "CLIP", 1101 | "link": 3 1102 | } 1103 | ], 1104 | "outputs": [ 1105 | { 1106 | "name": "CONDITIONING", 1107 | "type": "CONDITIONING", 1108 | "links": [ 1109 | 76 1110 | ], 1111 | "slot_index": 0 1112 | } 1113 | ], 1114 | "properties": { 1115 | "Node name for S&R": "CLIPTextEncode" 1116 | }, 1117 | "widgets_values": [ 1118 | "sculpture" 1119 | ] 1120 | }, 1121 | { 1122 | "id": 9, 1123 | "type": "SaveImage", 1124 | "pos": [ 1125 | 3087, 1126 | 295 1127 | ], 1128 | "size": { 1129 | "0": 390.46612548828125, 1130 | "1": 339.4088134765625 1131 | }, 1132 | "flags": {}, 1133 | "order": 29, 1134 | "mode": 0, 1135 | "inputs": [ 1136 | { 1137 | "name": "images", 1138 | "type": "IMAGE", 1139 | "link": 13 1140 | } 1141 | ], 1142 | "properties": { 1143 | "Node name for S&R": "SaveImage" 1144 | }, 1145 | "widgets_values": [ 1146 | "ComfyUI" 1147 | ] 1148 | }, 1149 | { 1150 | "id": 11, 1151 | "type": "LoadImage", 1152 | "pos": [ 1153 | -543, 1154 | 649 1155 | ], 1156 | "size": { 1157 | "0": 315, 1158 | "1": 314 1159 | }, 1160 | "flags": {}, 1161 | "order": 5, 1162 | "mode": 0, 1163 | "outputs": [ 1164 | { 1165 | "name": "IMAGE", 1166 | "type": "IMAGE", 1167 | "links": [ 1168 | 112 1169 | ], 1170 | "shape": 3, 1171 | "slot_index": 0 1172 | }, 1173 | { 1174 | "name": "MASK", 1175 | "type": "MASK", 1176 | "links": null, 1177 | "shape": 3 1178 | } 1179 | ], 1180 | "properties": { 1181 | "Node name for S&R": "LoadImage" 1182 | }, 1183 | "widgets_values": [ 1184 | "subject.png", 1185 | "image" 1186 | ] 1187 | }, 1188 | { 1189 | "id": 60, 1190 | "type": "ImageResize+", 1191 | "pos": [ 1192 | -101, 1193 | 696 1194 | ], 1195 | "size": { 1196 | "0": 315, 1197 | "1": 218 1198 | }, 1199 | "flags": {}, 1200 | "order": 10, 1201 | "mode": 0, 1202 | "inputs": [ 1203 | { 1204 | "name": "image", 1205 | "type": "IMAGE", 1206 | "link": 112 1207 | } 1208 | ], 1209 | "outputs": [ 1210 | { 1211 | "name": "IMAGE", 1212 | "type": "IMAGE", 1213 | "links": [ 1214 | 113, 1215 | 114, 1216 | 115, 1217 | 116 1218 | ], 1219 | "shape": 3, 1220 | "slot_index": 0 1221 | }, 1222 | { 1223 | "name": "width", 1224 | "type": "INT", 1225 | "links": null, 1226 | "shape": 3 1227 | }, 1228 | { 1229 | "name": "height", 1230 | "type": "INT", 1231 | "links": null, 1232 | "shape": 3 1233 | } 1234 | ], 1235 | "properties": { 1236 | "Node name for S&R": "ImageResize+" 1237 | }, 1238 | "widgets_values": [ 1239 | 1920, 1240 | 1920, 1241 | "nearest", 1242 | true, 1243 | "downscale if bigger", 1244 | 0 1245 | ] 1246 | }, 1247 | { 1248 | "id": 10, 1249 | "type": "KSampler (Efficient)", 1250 | "pos": [ 1251 | 2671, 1252 | 2 1253 | ], 1254 | "size": { 1255 | "0": 325, 1256 | "1": 562 1257 | }, 1258 | "flags": {}, 1259 | "order": 28, 1260 | "mode": 0, 1261 | "inputs": [ 1262 | { 1263 | "name": "model", 1264 | "type": "MODEL", 1265 | "link": 93 1266 | }, 1267 | { 1268 | "name": "positive", 1269 | "type": "CONDITIONING", 1270 | "link": 100 1271 | }, 1272 | { 1273 | "name": "negative", 1274 | "type": "CONDITIONING", 1275 | "link": 101 1276 | }, 1277 | { 1278 | "name": "latent_image", 1279 | 
"type": "LATENT", 1280 | "link": 80 1281 | }, 1282 | { 1283 | "name": "optional_vae", 1284 | "type": "VAE", 1285 | "link": 32 1286 | }, 1287 | { 1288 | "name": "script", 1289 | "type": "SCRIPT", 1290 | "link": null 1291 | } 1292 | ], 1293 | "outputs": [ 1294 | { 1295 | "name": "MODEL", 1296 | "type": "MODEL", 1297 | "links": null, 1298 | "shape": 3 1299 | }, 1300 | { 1301 | "name": "CONDITIONING+", 1302 | "type": "CONDITIONING", 1303 | "links": null, 1304 | "shape": 3 1305 | }, 1306 | { 1307 | "name": "CONDITIONING-", 1308 | "type": "CONDITIONING", 1309 | "links": null, 1310 | "shape": 3 1311 | }, 1312 | { 1313 | "name": "LATENT", 1314 | "type": "LATENT", 1315 | "links": null, 1316 | "shape": 3 1317 | }, 1318 | { 1319 | "name": "VAE", 1320 | "type": "VAE", 1321 | "links": null, 1322 | "shape": 3 1323 | }, 1324 | { 1325 | "name": "IMAGE", 1326 | "type": "IMAGE", 1327 | "links": [ 1328 | 13 1329 | ], 1330 | "shape": 3, 1331 | "slot_index": 5 1332 | } 1333 | ], 1334 | "properties": { 1335 | "Node name for S&R": "KSampler (Efficient)" 1336 | }, 1337 | "widgets_values": [ 1338 | 182541674078463, 1339 | null, 1340 | 6, 1341 | 2, 1342 | "dpmpp_sde", 1343 | "karras", 1344 | 1, 1345 | "auto", 1346 | "true" 1347 | ], 1348 | "color": "#223333", 1349 | "bgcolor": "#335555", 1350 | "shape": 1 1351 | } 1352 | ], 1353 | "links": [ 1354 | [ 1355 | 3, 1356 | 4, 1357 | 1, 1358 | 6, 1359 | 0, 1360 | "CLIP" 1361 | ], 1362 | [ 1363 | 5, 1364 | 4, 1365 | 1, 1366 | 7, 1367 | 0, 1368 | "CLIP" 1369 | ], 1370 | [ 1371 | 13, 1372 | 10, 1373 | 5, 1374 | 9, 1375 | 0, 1376 | "IMAGE" 1377 | ], 1378 | [ 1379 | 15, 1380 | 13, 1381 | 0, 1382 | 12, 1383 | 0, 1384 | "REMBG_SESSION" 1385 | ], 1386 | [ 1387 | 32, 1388 | 4, 1389 | 2, 1390 | 10, 1391 | 4, 1392 | "VAE" 1393 | ], 1394 | [ 1395 | 71, 1396 | 12, 1397 | 1, 1398 | 38, 1399 | 0, 1400 | "MASK" 1401 | ], 1402 | [ 1403 | 72, 1404 | 38, 1405 | 0, 1406 | 37, 1407 | 0, 1408 | "MASK" 1409 | ], 1410 | [ 1411 | 73, 1412 | 37, 1413 | 0, 1414 | 36, 1415 | 1, 1416 | "MASK" 1417 | ], 1418 | [ 1419 | 74, 1420 | 37, 1421 | 0, 1422 | 39, 1423 | 0, 1424 | "MASK" 1425 | ], 1426 | [ 1427 | 75, 1428 | 36, 1429 | 0, 1430 | 40, 1431 | 0, 1432 | "IMAGE" 1433 | ], 1434 | [ 1435 | 76, 1436 | 6, 1437 | 0, 1438 | 41, 1439 | 0, 1440 | "CONDITIONING" 1441 | ], 1442 | [ 1443 | 77, 1444 | 7, 1445 | 0, 1446 | 41, 1447 | 1, 1448 | "CONDITIONING" 1449 | ], 1450 | [ 1451 | 80, 1452 | 41, 1453 | 2, 1454 | 10, 1455 | 3, 1456 | "LATENT" 1457 | ], 1458 | [ 1459 | 81, 1460 | 4, 1461 | 2, 1462 | 41, 1463 | 2, 1464 | "VAE" 1465 | ], 1466 | [ 1467 | 82, 1468 | 36, 1469 | 0, 1470 | 41, 1471 | 3, 1472 | "IMAGE" 1473 | ], 1474 | [ 1475 | 83, 1476 | 37, 1477 | 0, 1478 | 41, 1479 | 4, 1480 | "MASK" 1481 | ], 1482 | [ 1483 | 86, 1484 | 4, 1485 | 0, 1486 | 44, 1487 | 0, 1488 | "MODEL" 1489 | ], 1490 | [ 1491 | 89, 1492 | 46, 1493 | 0, 1494 | 47, 1495 | 0, 1496 | "IMAGE" 1497 | ], 1498 | [ 1499 | 90, 1500 | 47, 1501 | 0, 1502 | 45, 1503 | 2, 1504 | "IMAGE" 1505 | ], 1506 | [ 1507 | 91, 1508 | 48, 1509 | 0, 1510 | 49, 1511 | 0, 1512 | "MODEL" 1513 | ], 1514 | [ 1515 | 92, 1516 | 45, 1517 | 0, 1518 | 48, 1519 | 0, 1520 | "MODEL" 1521 | ], 1522 | [ 1523 | 93, 1524 | 49, 1525 | 0, 1526 | 10, 1527 | 0, 1528 | "MODEL" 1529 | ], 1530 | [ 1531 | 94, 1532 | 51, 1533 | 0, 1534 | 50, 1535 | 2, 1536 | "CONTROL_NET" 1537 | ], 1538 | [ 1539 | 95, 1540 | 53, 1541 | 0, 1542 | 52, 1543 | 2, 1544 | "CONTROL_NET" 1545 | ], 1546 | [ 1547 | 96, 1548 | 50, 1549 | 0, 1550 | 52, 1551 | 0, 1552 | "CONDITIONING" 1553 | ], 1554 | [ 1555 | 97, 
1556 | 50, 1557 | 1, 1558 | 52, 1559 | 1, 1560 | "CONDITIONING" 1561 | ], 1562 | [ 1563 | 98, 1564 | 41, 1565 | 0, 1566 | 50, 1567 | 0, 1568 | "CONDITIONING" 1569 | ], 1570 | [ 1571 | 99, 1572 | 41, 1573 | 1, 1574 | 50, 1575 | 1, 1576 | "CONDITIONING" 1577 | ], 1578 | [ 1579 | 100, 1580 | 52, 1581 | 0, 1582 | 10, 1583 | 1, 1584 | "CONDITIONING" 1585 | ], 1586 | [ 1587 | 101, 1588 | 52, 1589 | 1, 1590 | 10, 1591 | 2, 1592 | "CONDITIONING" 1593 | ], 1594 | [ 1595 | 102, 1596 | 54, 1597 | 0, 1598 | 50, 1599 | 3, 1600 | "IMAGE" 1601 | ], 1602 | [ 1603 | 104, 1604 | 55, 1605 | 0, 1606 | 52, 1607 | 3, 1608 | "IMAGE" 1609 | ], 1610 | [ 1611 | 106, 1612 | 54, 1613 | 0, 1614 | 56, 1615 | 0, 1616 | "IMAGE" 1617 | ], 1618 | [ 1619 | 107, 1620 | 55, 1621 | 0, 1622 | 57, 1623 | 0, 1624 | "IMAGE" 1625 | ], 1626 | [ 1627 | 108, 1628 | 47, 1629 | 0, 1630 | 58, 1631 | 0, 1632 | "IMAGE" 1633 | ], 1634 | [ 1635 | 110, 1636 | 44, 1637 | 0, 1638 | 45, 1639 | 0, 1640 | "MODEL" 1641 | ], 1642 | [ 1643 | 111, 1644 | 44, 1645 | 1, 1646 | 45, 1647 | 1, 1648 | "IPADAPTER" 1649 | ], 1650 | [ 1651 | 112, 1652 | 11, 1653 | 0, 1654 | 60, 1655 | 0, 1656 | "IMAGE" 1657 | ], 1658 | [ 1659 | 113, 1660 | 60, 1661 | 0, 1662 | 12, 1663 | 1, 1664 | "IMAGE" 1665 | ], 1666 | [ 1667 | 114, 1668 | 60, 1669 | 0, 1670 | 36, 1671 | 0, 1672 | "IMAGE" 1673 | ], 1674 | [ 1675 | 115, 1676 | 60, 1677 | 0, 1678 | 54, 1679 | 0, 1680 | "IMAGE" 1681 | ], 1682 | [ 1683 | 116, 1684 | 60, 1685 | 0, 1686 | 55, 1687 | 0, 1688 | "IMAGE" 1689 | ] 1690 | ], 1691 | "groups": [ 1692 | { 1693 | "title": "Masking", 1694 | "bounding": [ 1695 | 462, 1696 | 668, 1697 | 1225, 1698 | 552 1699 | ], 1700 | "color": "#3f789e", 1701 | "font_size": 24, 1702 | "locked": false 1703 | }, 1704 | { 1705 | "title": "IPAdapter", 1706 | "bounding": [ 1707 | 462, 1708 | -691, 1709 | 1399, 1710 | 832 1711 | ], 1712 | "color": "#3f789e", 1713 | "font_size": 24, 1714 | "locked": false 1715 | }, 1716 | { 1717 | "title": "ControlNets", 1718 | "bounding": [ 1719 | 1776, 1720 | 347, 1721 | 831, 1722 | 901 1723 | ], 1724 | "color": "#3f789e", 1725 | "font_size": 24, 1726 | "locked": false 1727 | } 1728 | ], 1729 | "config": {}, 1730 | "extra": { 1731 | "0246.VERSION": [ 1732 | 0, 1733 | 0, 1734 | 4 1735 | ] 1736 | }, 1737 | "version": 0.4 1738 | } 1739 | -------------------------------------------------------------------------------- /predict.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import random 4 | import json 5 | import mimetypes 6 | from PIL import Image 7 | from typing import List 8 | from cog import BasePredictor, Input, Path 9 | from helpers.comfyui import ComfyUI 10 | 11 | OUTPUT_DIR = "/tmp/outputs" 12 | INPUT_DIR = "/tmp/inputs" 13 | COMFYUI_TEMP_OUTPUT_DIR = "ComfyUI/temp" 14 | 15 | mimetypes.add_type("image/webp", ".webp") 16 | 17 | 18 | class Predictor(BasePredictor): 19 | def setup(self): 20 | self.comfyUI = ComfyUI("127.0.0.1:8188") 21 | with open("material_transfer_api.json", "r") as file: 22 | default_workflow = file.read() 23 | 24 | self.comfyUI.handle_weights(json.loads(default_workflow)) 25 | self.comfyUI.start_server(OUTPUT_DIR, INPUT_DIR) 26 | 27 | def cleanup(self): 28 | self.comfyUI.clear_queue() 29 | for directory in [OUTPUT_DIR, INPUT_DIR, COMFYUI_TEMP_OUTPUT_DIR]: 30 | if os.path.exists(directory): 31 | shutil.rmtree(directory) 32 | os.makedirs(directory) 33 | 34 | def handle_input_file(self, input_file: Path, filename: str = "image.png"): 35 | image = Image.open(input_file) 
36 | image.save(os.path.join(INPUT_DIR, filename)) 37 | 38 | def log_and_collect_files(self, directory, prefix=""): 39 | files = [] 40 | for f in os.listdir(directory): 41 | if f == "__MACOSX": 42 | continue 43 | path = os.path.join(directory, f) 44 | if os.path.isfile(path): 45 | print(f"{prefix}{f}") 46 | files.append(Path(path)) 47 | elif os.path.isdir(path): 48 | print(f"{prefix}{f}/") 49 | files.extend(self.log_and_collect_files(path, prefix=f"{prefix}{f}/")) 50 | return files 51 | 52 | def update_workflow(self, workflow, **kwargs): 53 | workflow["6"]["inputs"]["text"] = kwargs["prompt"] 54 | workflow["7"]["inputs"]["text"] = f"nsfw, nude, {kwargs['negative_prompt']}" 55 | 56 | sampler = workflow["10"]["inputs"] 57 | sampler["seed"] = kwargs["seed"] 58 | sampler["steps"] = kwargs["steps"] 59 | sampler["cfg"] = kwargs["guidance_scale"] 60 | 61 | resize_input = workflow["60"]["inputs"] 62 | resize_input["width"] = kwargs["max_width"] 63 | resize_input["height"] = kwargs["max_height"] 64 | 65 | if kwargs["material_strength"] == "strong": 66 | workflow["44"]["inputs"]["preset"] = "PLUS (high strength)" 67 | else: 68 | workflow["44"]["inputs"]["preset"] = "STANDARD (medium strength)" 69 | 70 | def predict( 71 | self, 72 | material_image: Path = Input( 73 | description="Material to transfer to the input image", 74 | ), 75 | subject_image: Path = Input( 76 | description="Subject image to transfer the material to", 77 | ), 78 | prompt: str = Input( 79 | description="Use a prompt that describes the image when the material is applied", 80 | default="marble sculpture", 81 | ), 82 | negative_prompt: str = Input( 83 | description="What you do not want to see in the image", 84 | default="", 85 | ), 86 | guidance_scale: float = Input( 87 | description="Guidance scale for the diffusion process", 88 | default=2.0, 89 | ge=1.0, 90 | le=10.0, 91 | ), 92 | steps: int = Input( 93 | description="Number of steps. 6 steps gives good results, but try increasing to 15 or 20 if you need more detail", 94 | default=6, 95 | ), 96 | max_width: int = Input( 97 | description="Max width of the output image", 98 | default=1920, 99 | ), 100 | max_height: int = Input( 101 | description="Max height of the output image", 102 | default=1920, 103 | ), 104 | material_strength: str = Input( 105 | description="Strength of the material", 106 | default="medium", 107 | choices=["medium", "strong"], 108 | ), 109 | return_intermediate_images: bool = Input( 110 | description="Return intermediate images, like masks and annotated images. Useful for debugging.", 111 | default=False, 112 | ), 113 | seed: int = Input( 114 | description="Set a seed for reproducibility. Random by default.", 115 | default=None, 116 | ), 117 | output_format: str = Input( 118 | description="Format of the output images", 119 | choices=["webp", "jpg", "png"], 120 | default="webp", 121 | ), 122 | output_quality: int = Input( 123 | description="Quality of the output images, from 0 to 100. 
100 is best quality, 0 is lowest quality.", 124 | default=80, 125 | ge=0, 126 | le=100, 127 | ), 128 | ) -> List[Path]: 129 | """Run a single prediction on the model""" 130 | self.cleanup() 131 | 132 | if seed is None: 133 | seed = random.randint(0, 2**32 - 1) 134 | print(f"Random seed set to: {seed}") 135 | 136 | self.handle_input_file(material_image, "material.png") 137 | self.handle_input_file(subject_image, "subject.png") 138 | 139 | with open("material_transfer_api.json", "r") as file: 140 | workflow = json.loads(file.read()) 141 | 142 | self.update_workflow( 143 | workflow, 144 | prompt=prompt, 145 | negative_prompt=negative_prompt, 146 | guidance_scale=guidance_scale, 147 | steps=steps, 148 | max_width=max_width, 149 | max_height=max_height, 150 | material_strength=material_strength, 151 | seed=seed, 152 | ) 153 | 154 | wf = self.comfyUI.load_workflow(workflow) 155 | self.comfyUI.connect() 156 | self.comfyUI.run_workflow(wf) 157 | 158 | files = [] 159 | output_directories = [OUTPUT_DIR] 160 | if return_intermediate_images: 161 | output_directories.append(COMFYUI_TEMP_OUTPUT_DIR) 162 | 163 | for directory in output_directories: 164 | print(f"Contents of {directory}:") 165 | files.extend(self.log_and_collect_files(directory)) 166 | 167 | if output_quality < 100 or output_format in ["webp", "jpg"]: 168 | optimised_files = [] 169 | for file in files: 170 | if file.is_file() and file.suffix in [".jpg", ".jpeg", ".png"]: 171 | image = Image.open(file) 172 | optimised_file_path = file.with_suffix(f".{output_format}") 173 | image.save( 174 | optimised_file_path, 175 | quality=output_quality, 176 | optimize=True, 177 | ) 178 | optimised_files.append(optimised_file_path) 179 | else: 180 | optimised_files.append(file) 181 | 182 | files = optimised_files 183 | 184 | return files 185 | -------------------------------------------------------------------------------- /scripts/clone_plugins.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This script is used to clone specific versions of repositories. 4 | # It takes a list of repositories and their commit hashes, clones them into a specific directory, 5 | # and then checks out to the specified commit. 6 | 7 | # List of repositories and their commit hashes to clone 8 | # Each entry in the array is a string containing the repository URL and the commit hash separated by a space. 9 | repos=( 10 | "https://github.com/cubiq/ComfyUI_IPAdapter_plus 0d0a7b3" 11 | "https://github.com/Fannovel16/comfyui_controlnet_aux 692a3d0" 12 | "https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet 33d9884" 13 | "https://github.com/jags111/efficiency-nodes-comfyui 3b7e89d" 14 | "https://github.com/ssitu/ComfyUI_UltimateSDUpscale bcefc5b" 15 | "https://github.com/cubiq/ComfyUI_essentials 101ebae" 16 | "https://github.com/kealiu/ComfyUI-ZeroShot-MTrans d445504" 17 | ) 18 | 19 | # Destination directory 20 | # This is where the repositories will be cloned into. 21 | dest_dir="ComfyUI/custom_nodes/" 22 | 23 | # Loop over each repository in the list 24 | for repo in "${repos[@]}"; do 25 | # Extract the repository URL and the commit hash from the string 26 | repo_url=$(echo $repo | cut -d' ' -f1) 27 | commit_hash=$(echo $repo | cut -d' ' -f2) 28 | 29 | # Extract the repository name from the URL by removing the .git extension 30 | repo_name=$(basename "$repo_url" .git) 31 | 32 | # Check if the repository directory already exists 33 | if [ ! 
-d "$dest_dir$repo_name" ]; then 34 | # Clone the repository into the destination directory 35 | echo "Cloning $repo_url into $dest_dir$repo_name and checking out to commit $commit_hash" 36 | git clone --recursive "$repo_url" "$dest_dir$repo_name" 37 | 38 | # Use a subshell to avoid changing the main shell's working directory 39 | # Inside the subshell, change to the repository's directory and checkout to the specific commit 40 | ( 41 | cd "$dest_dir$repo_name" && git checkout "$commit_hash" 42 | rm -rf .git 43 | 44 | # Recursively remove .git directories from submodules 45 | find . -type d -name ".git" -exec rm -rf {} + 46 | 47 | # If the repository is efficiency-nodes-comfyui, also remove the images directory 48 | if [ "$repo_name" = "efficiency-nodes-comfyui" ]; then 49 | echo "Removing images and workflows directories from $repo_name" 50 | rm -rf images workflows 51 | fi 52 | ) 53 | else 54 | echo "Skipping clone for $repo_name, directory already exists" 55 | fi 56 | done 57 | -------------------------------------------------------------------------------- /scripts/reset.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sudo rm -rf ComfyUI 3 | git submodule update --init --recursive 4 | ./scripts/clone_plugins.sh 5 | -------------------------------------------------------------------------------- /weights.json: -------------------------------------------------------------------------------- 1 | { 2 | "CHECKPOINTS": [ 3 | "512-inpainting-ema.safetensors", 4 | "absolutereality_v181.safetensors", 5 | "albedobaseXL_v13.safetensors", 6 | "albedobaseXL_v21.safetensors", 7 | "anything-v3-fp16-pruned.safetensors", 8 | "CinematicRedmond.safetensors", 9 | "copaxTimelessxlSDXL1_v8.safetensors", 10 | "Deliberate_v2.safetensors", 11 | "dreamlabsoil_V2_v2.safetensors", 12 | "DreamShaper_6.2_BakedVae_pruned.safetensors", 13 | "DreamShaper_6.31_BakedVae.safetensors", 14 | "DreamShaper_6.31_BakedVae_pruned.safetensors", 15 | "DreamShaper_6.31_INPAINTING.inpainting.safetensors", 16 | "DreamShaper_6_BakedVae.safetensors", 17 | "dreamshaper_8.safetensors", 18 | "dreamshaper_8LCM.safetensors", 19 | "dreamshaperXL_alpha2Xl10.safetensors", 20 | "dreamshaperXL_lightningDPMSDE.safetensors", 21 | "dynavision_v20Bakedvae.safetensors", 22 | "epicrealism_naturalSinRC1VAE.safetensors", 23 | "epicrealismXL_v10.safetensors", 24 | "Hyper-SDXL-1step-Unet-Comfyui.fp16.safetensors", 25 | "Hyper-SDXL-1step-Unet.safetensors", 26 | "imp_v10.safetensors", 27 | "Juggernaut-XL_v9_RunDiffusionPhoto_v2.safetensors", 28 | "juggernaut_reborn.safetensors", 29 | "Juggernaut_RunDiffusionPhoto2_Lightning_4Steps.safetensors", 30 | "juggernautXL_juggernautX.safetensors", 31 | "juggernautXL_v8Rundiffusion.safetensors", 32 | "LCM_Dreamshaper_v7_4k.safetensors", 33 | "magicmixReverie_v10.safetensors", 34 | "majicmixRealistic_v7.safetensors", 35 | "motionctrl.pth", 36 | "motionctrl_svd.ckpt", 37 | "photonLCM_v10.safetensors", 38 | "pixlAnimeCartoonComic_v10.safetensors", 39 | "playground-v2.5-1024px-aesthetic.fp16.safetensors", 40 | "proteus_v02.safetensors", 41 | "ProteusV0.4-Lighting.safetensors", 42 | "ProteusV0.4.safetensors", 43 | "rcnzCartoon3d_v20.safetensors", 44 | "Realistic_Vision_V5.1-inpainting.ckpt", 45 | "Realistic_Vision_V5.1-inpainting.safetensors", 46 | "Realistic_Vision_V5.1.ckpt", 47 | "Realistic_Vision_V5.1.safetensors", 48 | "Realistic_Vision_V5.1_fp16-no-ema-inpainting.ckpt", 49 | "Realistic_Vision_V5.1_fp16-no-ema-inpainting.safetensors", 50 | 
"Realistic_Vision_V5.1_fp16-no-ema.ckpt", 51 | "Realistic_Vision_V5.1_fp16-no-ema.safetensors", 52 | "Realistic_Vision_V6.0_NV_B1.safetensors", 53 | "Realistic_Vision_V6.0_NV_B1_fp16.safetensors", 54 | "Realistic_Vision_V6.0_NV_B1_inpainting.safetensors", 55 | "Realistic_Vision_V6.0_NV_B1_inpainting_fp16.safetensors", 56 | "realisticLCMBYStable_v10.safetensors", 57 | "RealVisXL_V2.0.safetensors", 58 | "RealVisXL_V3.0.safetensors", 59 | "RealVisXL_V3.0_Turbo.safetensors", 60 | "RealVisXL_V4.0.safetensors", 61 | "RealVisXL_V4.0_Lightning.safetensors", 62 | "rundiffusionXL_beta.safetensors", 63 | "sd_xl_base_1.0.safetensors", 64 | "sd_xl_base_1.0_0.9vae.safetensors", 65 | "sd_xl_refiner_1.0.safetensors", 66 | "sd_xl_refiner_1.0_0.9vae.safetensors", 67 | "sd_xl_turbo_1.0.safetensors", 68 | "sd_xl_turbo_1.0_fp16.safetensors", 69 | "sdxl_lightning_1step_x0.safetensors", 70 | "sdxl_lightning_2step.safetensors", 71 | "sdxl_lightning_4step.safetensors", 72 | "sdxl_lightning_8step.safetensors", 73 | "segmind-vega.safetensors", 74 | "SSD-1B.safetensors", 75 | "starlightXLAnimated_v3.safetensors", 76 | "svd.safetensors", 77 | "svd_xt.safetensors", 78 | "toonyou_beta6.safetensors", 79 | "turbovisionxlSuperFastXLBasedOnNew_tvxlV32Bakedvae.safetensors", 80 | "v1-5-pruned-emaonly.ckpt", 81 | "v2-1_512-ema-pruned.safetensors", 82 | "v2-1_768-ema-pruned.ckpt", 83 | "v2-1_768-ema-pruned.safetensors", 84 | "v2-1_768-nonema-pruned.ckpt", 85 | "v2-1_768-nonema-pruned.safetensors", 86 | "wd-illusion-fp16.safetensors", 87 | "x4-upscaler-ema.safetensors" 88 | ], 89 | "UPSCALE_MODELS": [ 90 | "4x-AnimeSharp.pth", 91 | "4x-UltraMix_Balanced.pth", 92 | "4x-UltraMix_Smooth.pth", 93 | "4x-UltraSharp.pth", 94 | "4x_foolhardy_Remacri.pth", 95 | "4x_NMKD-Siax_200k.pth", 96 | "8x_NMKD-Superscale_150000_G.pth", 97 | "ESRGAN_4x.pth", 98 | "RealESRGAN_x2.pth", 99 | "RealESRGAN_x4.pth", 100 | "RealESRGAN_x4plus.pth", 101 | "RealESRGAN_x4plus_anime_6B.pth", 102 | "RealESRGAN_x8.pth" 103 | ], 104 | "CLIP_VISION": [ 105 | "CLIP-ViT-bigG-14-laion2B-39B-b160k.safetensors", 106 | "CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors", 107 | "clip-vit-large-patch14.bin", 108 | "clip_vision_g.safetensors", 109 | "IPAdapter_image_encoder_sd15.safetensors", 110 | "model.15.safetensors", 111 | "model.sdxl.safetensors" 112 | ], 113 | "LORAS": [ 114 | "add-detail-xl.safetensors", 115 | "aesthetic_anime_v1s.safetensors", 116 | "age_slider-LECO-v1.safetensors", 117 | "AnimateLCM_sd15_t2v_lora.safetensors", 118 | "artificialguybr/3DRedmond-3DRenderStyle-3DRenderAF.safetensors", 119 | "artificialguybr/AnalogRedmond-AnalogRedmAF.safetensors", 120 | "artificialguybr/AnalogRedmondV2-Analog-AnalogRedmAF.safetensors", 121 | "artificialguybr/BetterTextRedmond.safetensors", 122 | "artificialguybr/ClayAnimationRedm.safetensors", 123 | "artificialguybr/ClayAnimationRedmond15-ClayAnimation-Clay.safetensors", 124 | "artificialguybr/ColoringBookRedmond-ColoringBook-ColoringBookAF.safetensors", 125 | "artificialguybr/ColoringBookRedmond-ColoringBookAF.safetensors", 126 | "artificialguybr/ColoringBookRedmond21V-FreedomRedmond-ColoringBook-ColoringBookAF.safetensors", 127 | "artificialguybr/CuteCartoon15V-LiberteRedmodModel-Cartoon-CuteCartoonAF.safetensors", 128 | "artificialguybr/CuteCartoonRedmond-CuteCartoon-CuteCartoonAF.safetensors", 129 | "artificialguybr/CuteFruitsRedmond-CtFruitsRedmAF.safetensors", 130 | "artificialguybr/FilmGrainRedmond-FilmGrain-FilmGrainAF.safetensors", 131 | "artificialguybr/IconsRedmond.safetensors", 132 | 
"artificialguybr/IconsRedmond15V-Icons.safetensors", 133 | "artificialguybr/IconsRedmondV2-Icons.safetensors", 134 | "artificialguybr/LineAniRedmond-LineAniAF.safetensors", 135 | "artificialguybr/LineAniRedmondV2-Lineart-LineAniAF.safetensors", 136 | "artificialguybr/LogoRedmond15V-LogoRedmAF-Logo.safetensors", 137 | "artificialguybr/LogoRedmond_LogoRedAF.safetensors", 138 | "artificialguybr/LogoRedmondV2-Logo-LogoRedmAF.safetensors", 139 | "artificialguybr/MoviePosterRedmond-MoviePoster-MoviePosterRedAF.safetensors", 140 | "artificialguybr/PixelArtRedmond-Lite64.safetensors", 141 | "artificialguybr/PixelArtRedmond15V-PixelArt-PIXARFK.safetensors", 142 | "artificialguybr/PomologicalWatercolorRedmond.safetensors", 143 | "artificialguybr/PS1Redmond-PS1Game-Playstation1Graphics.safetensors", 144 | "artificialguybr/StickersRedmond.safetensors", 145 | "artificialguybr/StickersRedmond15Version-Stickers-Sticker.safetensors", 146 | "artificialguybr/StickersRedmond21V-FreedomRedmond-Sticker-Stickers.safetensors", 147 | "artificialguybr/StoryBookRedmond-KidsRedmAF.safetensors", 148 | "artificialguybr/StoryBookRedmond15-KidsRedmAF-KidsBook.safetensors", 149 | "artificialguybr/StorybookRedmondUnbound-KidsRedmAF.safetensors", 150 | "artificialguybr/StorybookRedmondV2-KidsBook-KidsRedmAF.safetensors", 151 | "artificialguybr/StudioGhibli.Redmond-StdGBRRedmAF-StudioGhibli.safetensors", 152 | "artificialguybr/StudioGhibliRedmond-StdGBRedmAF.safetensors", 153 | "artificialguybr/ToyRedmond-FnkRedmAF.safetensors", 154 | "artificialguybr/TshirtDesignRedmond-TshirtDesignAF.safetensors", 155 | "artificialguybr/TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.safetensors", 156 | "artificialguybr/View360.safetensors", 157 | "COOLKIDS_MERGE_V2.5.safetensors", 158 | "fofr/emoji.safetensors", 159 | "glowneon_xl_v1.safetensors", 160 | "Harrlogos_v2.0.safetensors", 161 | "Hyper-SD15-1step-lora.safetensors", 162 | "Hyper-SD15-2steps-lora.safetensors", 163 | "Hyper-SD15-4steps-lora.safetensors", 164 | "Hyper-SD15-8steps-lora.safetensors", 165 | "Hyper-SDXL-1step-lora.safetensors", 166 | "Hyper-SDXL-2steps-lora.safetensors", 167 | "Hyper-SDXL-4steps-lora.safetensors", 168 | "Hyper-SDXL-8steps-lora.safetensors", 169 | "ip-adapter-faceid-plus_sd15_lora.safetensors", 170 | "ip-adapter-faceid-plusv2_sd15_lora.safetensors", 171 | "ip-adapter-faceid-plusv2_sdxl_lora.safetensors", 172 | "ip-adapter-faceid_sd15_lora.safetensors", 173 | "ip-adapter-faceid_sdxl_lora.safetensors", 174 | "lcm-lora-sdv1-5.safetensors", 175 | "lcm-lora-ssd-1b.safetensors", 176 | "lcm_lora_sdxl.safetensors", 177 | "MODILL_XL_0.27_RC.safetensors", 178 | "more_details.safetensors", 179 | "PerfectEyesXL.safetensors", 180 | "sd_xl_offset_example-lora_1.0.safetensors", 181 | "sdxl_lightning_2step_lora.pth", 182 | "sdxl_lightning_2step_lora.safetensors", 183 | "sdxl_lightning_4step_lora.pth", 184 | "sdxl_lightning_4step_lora.safetensors", 185 | "sdxl_lightning_8step_lora.pth", 186 | "sdxl_lightning_8step_lora.safetensors", 187 | "SDXLrender_v2.0.safetensors", 188 | "Segmind-VegaRT.safetensors", 189 | "theovercomer8sContrastFix_sd15.safetensors", 190 | "theovercomer8sContrastFix_sd21768.safetensors", 191 | "weight_slider-LECO-v1.safetensors" 192 | ], 193 | "EMBEDDINGS": [ 194 | "bad_prompt_version2-neg.pt", 195 | "easynegative.safetensors", 196 | "epiCNegative.pt", 197 | "epiCRealism.pt", 198 | "FastNegativeV2.pt", 199 | "JuggernautNegative-neg.pt", 200 | "negative_hand-neg.pt", 201 | "ng_deepnegative_v1_75t.pt", 202 | "verybadimagenegative_v1.3.pt" 203 | ], 
204 | "CONTROLNET": [ 205 | "control-lora-canny-rank128.safetensors", 206 | "control-lora-canny-rank256.safetensors", 207 | "control-lora-depth-rank128.safetensors", 208 | "control-lora-depth-rank256.safetensors", 209 | "control-lora-recolor-rank128.safetensors", 210 | "control-lora-recolor-rank256.safetensors", 211 | "control-lora-sketch-rank128-metadata.safetensors", 212 | "control-lora-sketch-rank256.safetensors", 213 | "control_boxdepth_LooseControlfp16.safetensors", 214 | "control_lora_rank128_v11e_sd15_ip2p_fp16.safetensors", 215 | "control_lora_rank128_v11e_sd15_shuffle_fp16.safetensors", 216 | "control_lora_rank128_v11f1e_sd15_tile_fp16.safetensors", 217 | "control_lora_rank128_v11f1p_sd15_depth_fp16.safetensors", 218 | "control_lora_rank128_v11p_sd15_canny_fp16.safetensors", 219 | "control_lora_rank128_v11p_sd15_inpaint_fp16.safetensors", 220 | "control_lora_rank128_v11p_sd15_lineart_fp16.safetensors", 221 | "control_lora_rank128_v11p_sd15_mlsd_fp16.safetensors", 222 | "control_lora_rank128_v11p_sd15_normalbae_fp16.safetensors", 223 | "control_lora_rank128_v11p_sd15_openpose_fp16.safetensors", 224 | "control_lora_rank128_v11p_sd15_scribble_fp16.safetensors", 225 | "control_lora_rank128_v11p_sd15_seg_fp16.safetensors", 226 | "control_lora_rank128_v11p_sd15_softedge_fp16.safetensors", 227 | "control_lora_rank128_v11p_sd15s2_lineart_anime_fp16.safetensors", 228 | "control_sd15_inpaint_depth_hand_fp16.safetensors", 229 | "control_v11e_sd15_ip2p.pth", 230 | "control_v11e_sd15_ip2p_fp16.safetensors", 231 | "control_v11e_sd15_shuffle.pth", 232 | "control_v11e_sd15_shuffle_fp16.safetensors", 233 | "control_v11f1e_sd15_tile.pth", 234 | "control_v11f1e_sd15_tile_fp16.safetensors", 235 | "control_v11f1p_sd15_depth.pth", 236 | "control_v11f1p_sd15_depth_fp16.safetensors", 237 | "control_v11p_sd15_canny.pth", 238 | "control_v11p_sd15_canny_fp16.safetensors", 239 | "control_v11p_sd15_inpaint.pth", 240 | "control_v11p_sd15_inpaint_fp16.safetensors", 241 | "control_v11p_sd15_lineart.pth", 242 | "control_v11p_sd15_lineart_fp16.safetensors", 243 | "control_v11p_sd15_mlsd.pth", 244 | "control_v11p_sd15_mlsd_fp16.safetensors", 245 | "control_v11p_sd15_normalbae.pth", 246 | "control_v11p_sd15_normalbae_fp16.safetensors", 247 | "control_v11p_sd15_openpose.pth", 248 | "control_v11p_sd15_openpose_fp16.safetensors", 249 | "control_v11p_sd15_scribble.pth", 250 | "control_v11p_sd15_scribble_fp16.safetensors", 251 | "control_v11p_sd15_seg.pth", 252 | "control_v11p_sd15_seg_fp16.safetensors", 253 | "control_v11p_sd15_softedge.pth", 254 | "control_v11p_sd15_softedge_fp16.safetensors", 255 | "control_v11p_sd15s2_lineart_anime.pth", 256 | "control_v11p_sd15s2_lineart_anime_fp16.safetensors", 257 | "control_v11u_sd15_tile_fp16.safetensors", 258 | "control_v1p_sd15_qrcode_monster.safetensors", 259 | "control_v1p_sdxl_qrcode_monster.safetensors", 260 | "controllllite_v01032064e_sdxl_canny_anime.safetensors", 261 | "controlnet-canny-sdxl-1.0.fp16.safetensors", 262 | "controlnet-depth-sdxl-1.0.fp16.safetensors", 263 | "controlnet-sd-xl-1.0-softedge-dexined.safetensors", 264 | "controlnet-temporalnet-sdxl-1.0.safetensors", 265 | "depth-anything.safetensors", 266 | "depth-zoe-xl-v1.0-controlnet.safetensors", 267 | "diffusers_xl_canny_full.safetensors", 268 | "diffusers_xl_canny_mid.safetensors", 269 | "diffusers_xl_canny_small.safetensors", 270 | "diffusers_xl_depth_full.safetensors", 271 | "diffusers_xl_depth_mid.safetensors", 272 | "diffusers_xl_depth_small.safetensors", 273 | "instantid-controlnet.safetensors", 274 
| "ioclab_sd15_recolor.safetensors", 275 | "ip-adapter_sd15.pth", 276 | "ip-adapter_sd15_plus.pth", 277 | "ip-adapter_xl.pth", 278 | "kohya_controllllite_xl_blur.safetensors", 279 | "kohya_controllllite_xl_blur_anime.safetensors", 280 | "kohya_controllllite_xl_blur_anime_beta.safetensors", 281 | "kohya_controllllite_xl_canny.safetensors", 282 | "kohya_controllllite_xl_canny_anime.safetensors", 283 | "kohya_controllllite_xl_depth.safetensors", 284 | "kohya_controllllite_xl_depth_anime.safetensors", 285 | "kohya_controllllite_xl_openpose_anime.safetensors", 286 | "kohya_controllllite_xl_openpose_anime_v2.safetensors", 287 | "kohya_controllllite_xl_scribble_anime.safetensors", 288 | "OpenPoseXL2.safetensors", 289 | "sai_xl_canny_128lora.safetensors", 290 | "sai_xl_canny_256lora.safetensors", 291 | "sai_xl_depth_128lora.safetensors", 292 | "sai_xl_depth_256lora.safetensors", 293 | "sai_xl_recolor_128lora.safetensors", 294 | "sai_xl_recolor_256lora.safetensors", 295 | "sai_xl_sketch_128lora.safetensors", 296 | "sai_xl_sketch_256lora.safetensors", 297 | "sargezt_xl_depth.safetensors", 298 | "sargezt_xl_depth_faid_vidit.safetensors", 299 | "sargezt_xl_depth_zeed.safetensors", 300 | "sargezt_xl_softedge.safetensors", 301 | "t2i-adapter_diffusers_xl_canny.safetensors", 302 | "t2i-adapter_diffusers_xl_depth_midas.safetensors", 303 | "t2i-adapter_diffusers_xl_depth_zoe.safetensors", 304 | "t2i-adapter_diffusers_xl_lineart.safetensors", 305 | "t2i-adapter_diffusers_xl_openpose.safetensors", 306 | "t2i-adapter_diffusers_xl_sketch.safetensors", 307 | "t2i-adapter_xl_canny.safetensors", 308 | "t2i-adapter_xl_openpose.safetensors", 309 | "t2i-adapter_xl_sketch.safetensors", 310 | "t2iadapter_canny_sd14v1.pth", 311 | "t2iadapter_color_sd14v1.pth", 312 | "t2iadapter_depth_sd14v1.pth", 313 | "t2iadapter_keypose_sd14v1.pth", 314 | "t2iadapter_openpose_sd14v1.pth", 315 | "t2iadapter_seg_sd14v1.pth", 316 | "t2iadapter_sketch_sd14v1.pth", 317 | "t2iadapter_style_sd14v1.pth", 318 | "temporalnetversion2.ckpt", 319 | "thibaud_xl_openpose.safetensors", 320 | "thibaud_xl_openpose_256lora.safetensors" 321 | ], 322 | "IPADAPTER": [ 323 | "ip-adapter-faceid-plus_sd15.bin", 324 | "ip-adapter-faceid-plusv2_sd15.bin", 325 | "ip-adapter-faceid-plusv2_sdxl.bin", 326 | "ip-adapter-faceid-portrait-v11_sd15.bin", 327 | "ip-adapter-faceid-portrait_sd15.bin", 328 | "ip-adapter-faceid-portrait_sdxl.bin", 329 | "ip-adapter-faceid-portrait_sdxl_unnorm.bin", 330 | "ip-adapter-faceid_sd15.bin", 331 | "ip-adapter-faceid_sdxl.bin", 332 | "ip-adapter-full-face_sd15.bin", 333 | "ip-adapter-full-face_sd15.safetensors", 334 | "ip-adapter-plus-face_sd15.bin", 335 | "ip-adapter-plus-face_sd15.safetensors", 336 | "ip-adapter-plus-face_sdxl_vit-h.bin", 337 | "ip-adapter-plus-face_sdxl_vit-h.safetensors", 338 | "ip-adapter-plus_sd15.bin", 339 | "ip-adapter-plus_sd15.safetensors", 340 | "ip-adapter-plus_sdxl_vit-h.bin", 341 | "ip-adapter-plus_sdxl_vit-h.safetensors", 342 | "ip-adapter_sd15.bin", 343 | "ip-adapter_sd15.safetensors", 344 | "ip-adapter_sd15_light.bin", 345 | "ip-adapter_sd15_light.safetensors", 346 | "ip-adapter_sd15_light_v11.bin", 347 | "ip-adapter_sd15_vit-G.bin", 348 | "ip-adapter_sd15_vit-G.safetensors", 349 | "ip-adapter_sdxl.safetensors", 350 | "ip-adapter_sdxl_vit-h.safetensors", 351 | "ip_plus_composition_sd15.safetensors", 352 | "ip_plus_composition_sdxl.safetensors" 353 | ], 354 | "VAE": [ 355 | "sdxl_vae.safetensors", 356 | "vae-ft-mse-840000-ema-pruned.safetensors" 357 | ], 358 | "UNET": [ 359 | 
"sdxl_lightning_1step_unet_x0.pth", 360 | "sdxl_lightning_1step_unet_x0.safetensors", 361 | "sdxl_lightning_2step_unet.pth", 362 | "sdxl_lightning_2step_unet.safetensors", 363 | "sdxl_lightning_4step_unet.pth", 364 | "sdxl_lightning_4step_unet.safetensors", 365 | "sdxl_lightning_8step_unet.pth", 366 | "sdxl_lightning_8step_unet.safetensors" 367 | ], 368 | "PHOTOMAKER": [ 369 | "photomaker-v1.bin" 370 | ], 371 | "INSTANTID": [ 372 | "instantid-ip-adapter.bin" 373 | ], 374 | "INSIGHTFACE": [ 375 | "antelopev2", 376 | "buffalo_l", 377 | "inswapper_128.onnx", 378 | "inswapper_128_fp16.onnx", 379 | "models/antelopev2", 380 | "models/buffalo_l" 381 | ], 382 | "FACEDETECTION": [ 383 | "detection_mobilenet0.25_Final.pth", 384 | "detection_Resnet50_Final.pth", 385 | "parsing_parsenet.pth", 386 | "yolov5l-face.pth", 387 | "yolov5n-face.pth" 388 | ], 389 | "FACERESTORE_MODELS": [ 390 | "codeformer.pth", 391 | "GFPGANv1.3.pth", 392 | "GFPGANv1.4.pth", 393 | "RestoreFormer.pth" 394 | ], 395 | "MMDETS": [ 396 | "bbox/mmdet_anime-face_yolov3.pth" 397 | ], 398 | "SAMS": [ 399 | "mobile_sam.pt", 400 | "sam_hq_vit_b.pth", 401 | "sam_hq_vit_h.pth", 402 | "sam_hq_vit_l.pth", 403 | "sam_vit_b_01ec64.pth", 404 | "sam_vit_h_4b8939.pth", 405 | "sam_vit_l_0b3195.pth" 406 | ], 407 | "GROUNDING-DINO": [ 408 | "groundingdino_swinb_cogcoor.pth", 409 | "groundingdino_swint_ogc.pth" 410 | ], 411 | "BERT-BASE-UNCASED": [ 412 | "bert-base-uncased" 413 | ], 414 | "ULTRALYTICS": [ 415 | "bbox/face_yolov8m.pt", 416 | "bbox/hand_yolov8s.pt", 417 | "segm/hair_yolov8n-seg_60.pt", 418 | "segm/person_yolov8m-seg.pt" 419 | ], 420 | "LAYER_MODEL": [ 421 | "layer_sd15_bg2fg.safetensors", 422 | "layer_sd15_fg2bg.safetensors", 423 | "layer_sd15_joint.safetensors", 424 | "layer_sd15_transparent_attn.safetensors", 425 | "layer_sd15_vae_transparent_decoder.safetensors", 426 | "layer_xl_bg2ble.safetensors", 427 | "layer_xl_bgble2fg.safetensors", 428 | "layer_xl_fg2ble.safetensors", 429 | "layer_xl_fgble2bg.safetensors", 430 | "layer_xl_transparent_attn.safetensors", 431 | "layer_xl_transparent_conv.safetensors", 432 | "vae_transparent_decoder.safetensors" 433 | ], 434 | "CLIPSEG": [ 435 | "models--CIDAS--clipseg-rd64-refined" 436 | ], 437 | "REMBG": [ 438 | "u2net.onnx" 439 | ] 440 | } 441 | -------------------------------------------------------------------------------- /weights_downloader.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import time 3 | import os 4 | 5 | from weights_manifest import WeightsManifest 6 | 7 | BASE_URL = "https://weights.replicate.delivery/default/comfy-ui" 8 | 9 | 10 | class WeightsDownloader: 11 | supported_filetypes = [ 12 | ".ckpt", 13 | ".safetensors", 14 | ".pt", 15 | ".pth", 16 | ".bin", 17 | ".onnx", 18 | ".torchscript", 19 | ] 20 | 21 | def __init__(self): 22 | self.weights_manifest = WeightsManifest() 23 | self.weights_map = self.weights_manifest.weights_map 24 | 25 | def get_weights_by_type(self, type): 26 | return self.weights_manifest.get_weights_by_type(type) 27 | 28 | def download_weights(self, weight_str): 29 | if weight_str in self.weights_map: 30 | if self.weights_manifest.is_non_commercial_only(weight_str): 31 | print( 32 | f"⚠️ {weight_str} is for non-commercial use only. 
Unless you have obtained a commercial license.\nDetails: https://github.com/fofr/cog-comfyui/blob/main/weights_licenses.md" 33 | ) 34 | self.download_if_not_exists( 35 | weight_str, 36 | self.weights_map[weight_str]["url"], 37 | self.weights_map[weight_str]["dest"], 38 | ) 39 | else: 40 | raise ValueError( 41 | f"{weight_str} unavailable. View the list of available weights: https://github.com/fofr/cog-comfyui/blob/main/supported_weights.md" 42 | ) 43 | 44 | def download_torch_checkpoints(self): 45 | self.download_if_not_exists( 46 | "mobilenet_v2-b0353104.pth", 47 | f"{BASE_URL}/custom_nodes/comfyui_controlnet_aux/mobilenet_v2-b0353104.pth.tar", 48 | "/root/.cache/torch/hub/checkpoints/", 49 | ) 50 | 51 | self.download_if_not_exists( 52 | "u2net.onnx", 53 | f"{BASE_URL}/rembg/u2net.onnx.tar", 54 | "/root/.u2net/", 55 | ) 56 | 57 | def download_if_not_exists(self, weight_str, url, dest): 58 | if not os.path.exists(f"{dest}/{weight_str}"): 59 | self.download(weight_str, url, dest) 60 | 61 | def download(self, weight_str, url, dest): 62 | if "/" in weight_str: 63 | subfolder = weight_str.rsplit("/", 1)[0] 64 | dest = os.path.join(dest, subfolder) 65 | os.makedirs(dest, exist_ok=True) 66 | 67 | print(f"⏳ Downloading {weight_str} to {dest}") 68 | start = time.time() 69 | subprocess.check_call( 70 | ["pget", "--log-level", "warn", "-xf", url, dest], close_fds=False 71 | ) 72 | elapsed_time = time.time() - start 73 | try: 74 | file_size_bytes = os.path.getsize( 75 | os.path.join(dest, os.path.basename(weight_str)) 76 | ) 77 | file_size_megabytes = file_size_bytes / (1024 * 1024) 78 | print( 79 | f"⌛️ Downloaded {weight_str} in {elapsed_time:.2f}s, size: {file_size_megabytes:.2f}MB" 80 | ) 81 | except FileNotFoundError: 82 | print(f"⌛️ Downloaded {weight_str} in {elapsed_time:.2f}s") 83 | -------------------------------------------------------------------------------- /weights_manifest.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import time 3 | import os 4 | import json 5 | 6 | from helpers.ComfyUI_Controlnet_Aux import ComfyUI_Controlnet_Aux 7 | 8 | UPDATED_WEIGHTS_MANIFEST_URL = f"https://weights.replicate.delivery/default/comfy-ui/weights.json?cache_bypass={int(time.time())}" 9 | UPDATED_WEIGHTS_MANIFEST_PATH = "updated_weights.json" 10 | WEIGHTS_MANIFEST_PATH = "weights.json" 11 | 12 | BASE_URL = "https://weights.replicate.delivery/default/comfy-ui" 13 | BASE_PATH = "ComfyUI/models" 14 | 15 | 16 | class WeightsManifest: 17 | def __init__(self): 18 | self.weights_manifest = self._load_weights_manifest() 19 | self.weights_map = self._initialize_weights_map() 20 | 21 | def _load_weights_manifest(self): 22 | self._download_updated_weights_manifest() 23 | return self._merge_manifests() 24 | 25 | def _download_updated_weights_manifest(self): 26 | if not os.path.exists(UPDATED_WEIGHTS_MANIFEST_PATH): 27 | print( 28 | f"Downloading updated weights manifest from {UPDATED_WEIGHTS_MANIFEST_URL}" 29 | ) 30 | start = time.time() 31 | subprocess.check_call( 32 | [ 33 | "pget", 34 | "--log-level", 35 | "warn", 36 | "-f", 37 | UPDATED_WEIGHTS_MANIFEST_URL, 38 | UPDATED_WEIGHTS_MANIFEST_PATH, 39 | ], 40 | close_fds=False, 41 | ) 42 | print( 43 | f"Downloading {UPDATED_WEIGHTS_MANIFEST_URL} took: {(time.time() - start):.2f}s" 44 | ) 45 | else: 46 | print("Updated weights manifest file already exists") 47 | 48 | def _merge_manifests(self): 49 | if os.path.exists(WEIGHTS_MANIFEST_PATH): 50 | with open(WEIGHTS_MANIFEST_PATH, "r") as f: 51 | 
original_manifest = json.load(f) 52 | else: 53 | original_manifest = {} 54 | 55 | with open(UPDATED_WEIGHTS_MANIFEST_PATH, "r") as f: 56 | updated_manifest = json.load(f) 57 | 58 | for key in updated_manifest: 59 | if key in original_manifest: 60 | for item in updated_manifest[key]: 61 | if item not in original_manifest[key]: 62 | print(f"Adding {item} to {key}") 63 | original_manifest[key].append(item) 64 | else: 65 | original_manifest[key] = updated_manifest[key] 66 | 67 | return original_manifest 68 | 69 | def _generate_weights_map(self, keys, dest): 70 | return { 71 | key: { 72 | "url": f"{BASE_URL}/{dest}/{key}.tar", 73 | "dest": f"{BASE_PATH}/{dest}", 74 | } 75 | for key in keys 76 | } 77 | 78 | def _initialize_weights_map(self): 79 | weights_map = {} 80 | for key in self.weights_manifest.keys(): 81 | if key.isupper(): 82 | weights_map.update( 83 | self._generate_weights_map(self.weights_manifest[key], key.lower()) 84 | ) 85 | weights_map.update(ComfyUI_Controlnet_Aux.weights_map(BASE_URL)) 86 | 87 | print("Allowed weights:") 88 | for weight in weights_map.keys(): 89 | print(weight) 90 | 91 | return weights_map 92 | 93 | def non_commercial_weights(self): 94 | return [ 95 | "inswapper_128.onnx", 96 | "inswapper_128_fp16.onnx", 97 | "proteus_v02.safetensors", 98 | "RealVisXL_V3.0_Turbo.safetensors", 99 | "sd_xl_turbo_1.0.safetensors", 100 | "sd_xl_turbo_1.0_fp16.safetensors", 101 | "svd.safetensors", 102 | "svd_xt.safetensors", 103 | "turbovisionxlSuperFastXLBasedOnNew_tvxlV32Bakedvae", 104 | "copaxTimelessxlSDXL1_v8.safetensors", 105 | "MODILL_XL_0.27_RC.safetensors", 106 | "epicrealismXL_v10.safetensors", 107 | "RMBG-1.4/model.pth", 108 | ] 109 | 110 | def is_non_commercial_only(self, weight_str): 111 | return weight_str in self.non_commercial_weights() 112 | 113 | def get_weights_by_type(self, weight_type): 114 | return self.weights_manifest.get(weight_type, []) 115 | --------------------------------------------------------------------------------
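As a quick orientation to the two weight-handling modules above, the following is a minimal sketch of how one entry in `weights.json` is resolved into a download URL and a destination folder, mirroring `WeightsManifest._generate_weights_map`. It is illustrative only: the checkpoint name is just one example taken from the `CHECKPOINTS` list, and the real classes additionally merge the updated manifest and add the controlnet_aux weights before building the map.

```python
# Minimal sketch mirroring WeightsManifest._generate_weights_map above.
# "albedobaseXL_v21.safetensors" is an example entry from the CHECKPOINTS
# list in weights.json; any listed weight resolves the same way.
BASE_URL = "https://weights.replicate.delivery/default/comfy-ui"
BASE_PATH = "ComfyUI/models"


def generate_weights_map(keys, dest):
    # Each weight filename becomes a tarball URL under BASE_URL and a
    # target folder under ComfyUI/models, keyed by the bare filename.
    return {
        key: {
            "url": f"{BASE_URL}/{dest}/{key}.tar",
            "dest": f"{BASE_PATH}/{dest}",
        }
        for key in keys
    }


# The "CHECKPOINTS" section of weights.json maps to the "checkpoints" folder.
weights_map = generate_weights_map(["albedobaseXL_v21.safetensors"], "checkpoints")
print(weights_map["albedobaseXL_v21.safetensors"])
# {'url': 'https://weights.replicate.delivery/default/comfy-ui/checkpoints/albedobaseXL_v21.safetensors.tar',
#  'dest': 'ComfyUI/models/checkpoints'}
```

At prediction time, `Predictor.setup` hands the workflow JSON to `ComfyUI.handle_weights` (defined in `helpers/comfyui.py`, not shown here), which is presumably where `WeightsDownloader.download_weights` is invoked for each referenced weight; that method warns about non-commercial weights and only shells out to `pget -xf <url> <dest>` when the file is not already on disk.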