├── .DS_Store
├── .gitignore
├── .idea
│   ├── .gitignore
│   ├── comfyui-llm-node-for-amazon-bedrock.iml
│   ├── misc.xml
│   ├── modules.xml
│   └── vcs.xml
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── __init__.py
├── assets
│   ├── base_models_us-east-1.png
│   ├── base_models_us-east-2.png
│   ├── cat.png
│   ├── comfyui_on_sagemaker.yaml
│   ├── example_claude3_multimodal.webp
│   ├── example_generate_image_variation.webp
│   ├── example_inpainting_with_natural_language.webp
│   ├── example_prompts_refine.webp
│   ├── example_variation_with_caption.webp
│   ├── flying_duck.png
│   ├── img2vid_nova_reel.png
│   ├── img2vid_nova_reel_output_example.mp4
│   ├── man_in_armor.png
│   ├── model_access.webp
│   ├── nova_canvas_workflows.png
│   ├── nova_video_output.png
│   ├── stability_ai_workflows.png
│   ├── stack_complete.webp
│   ├── text2vid_luma_output_example.mp4
│   ├── text2vid_luma_ray.png
│   ├── text2vid_nova_reel.png
│   └── text2vid_nova_reel_output_example.mp4
├── images
│   ├── edit12.png
│   ├── edit7.png
│   ├── gen9.png
│   └── ref7.jpg
├── nodes
│   ├── bedrock.py
│   ├── bedrock_luma_ray_video.py
│   ├── bedrock_nova_image.py
│   ├── bedrock_nova_video.py
│   ├── bedrock_stability_image.py
│   ├── bedrock_titan_image.py
│   ├── file_utils.py
│   ├── json.py
│   ├── prompts.py
│   ├── s3.py
│   ├── session.py
│   ├── textract.py
│   └── utils.py
├── requirements.txt
└── workflows
    ├── amazon_nova_canvas_nodes.json
    ├── amazon_stablity_ai.json
    ├── generate_image_variation.json
    ├── img2vid_nova_reel.json
    ├── inpainting_with_natural_language.json
    ├── lama.json
    ├── sensitive erase.json
    ├── subtitles_translate.json
    ├── text2img_with_prompt_refinement.json
    ├── text2vid_luma_ray2.json
    ├── text2vid_nova_reel.json
    └── variation_with_caption.json
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/.DS_Store
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store*
2 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 |
--------------------------------------------------------------------------------
/.idea/comfyui-llm-node-for-amazon-bedrock.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 |
10 | ## Reporting Bugs/Feature Requests
11 |
12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
13 |
14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
16 |
17 | * A reproducible test case or series of steps
18 | * The version of our code being used
19 | * Any modifications you've made relevant to the bug
20 | * Anything unusual about your environment or deployment
21 |
22 |
23 | ## Contributing via Pull Requests
24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
25 |
26 | 1. You are working against the latest source on the *main* branch.
27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
29 |
30 | To send us a pull request, please:
31 |
32 | 1. Fork the repository.
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
34 | 3. Ensure local tests pass.
35 | 4. Commit to your fork using clear commit messages.
36 | 5. Send us a pull request, answering any default questions in the pull request interface.
37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
38 |
39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
41 |
42 |
43 | ## Finding contributions to work on
44 | Looking at the existing issues is a great way to find something to contribute to. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start.
45 |
46 |
47 | ## Code of Conduct
48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
50 | opensource-codeofconduct@amazon.com with any additional questions or comments.
51 |
52 |
53 | ## Security issue notifications
54 | If you discover a potential security issue in this project, we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.
55 |
56 |
57 | ## Licensing
58 |
59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
60 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT No Attribution
2 |
3 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so.
10 |
11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
12 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
13 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
14 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
15 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
16 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
17 |
18 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Amazon Bedrock nodes for ComfyUI
2 |
3 |
4 | [***Amazon Bedrock***](https://aws.amazon.com/bedrock/) is a fully managed service that offers a choice of high-performing foundation models (FMs) from leading AI companies.
5 | This repo provides ComfyUI nodes for the Bedrock service, so you can invoke foundation models from your ComfyUI pipeline.
6 |
7 | ## Installation (SageMaker by CloudFormation)
8 |
9 | Using [__*Amazon SageMaker*__](https://aws.amazon.com/sagemaker/) is the easiest way to develop your AI model. You can deploy ComfyUI on a SageMaker notebook instance using CloudFormation.
10 |
11 | 1. Open [CloudFormation console](https://console.aws.amazon.com/cloudformation/home#/stacks/create), and upload [`./assets/comfyui_on_sagemaker.yaml`](https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/main/assets/comfyui_on_sagemaker.yaml) by "Upload a template file".
12 | 2. Next, enter a stack name and choose an instance type that fits your needs. Accept the defaults on the remaining pages and submit.
13 | 3. Wait a few minutes; once the stack is complete, you will find the ComfyUI URL in the stack outputs. Enjoy!
14 |
15 | 
16 |
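If you prefer to script the deployment instead of using the console, here is a minimal boto3 sketch (the stack name is an example; the template creates an IAM role, so the IAM capability must be acknowledged):

```python
import boto3

cfn = boto3.client("cloudformation")
with open("assets/comfyui_on_sagemaker.yaml") as f:
    template = f.read()

cfn.create_stack(
    StackName="comfyui-on-sagemaker",  # example stack name
    TemplateBody=template,
    Parameters=[
        {"ParameterKey": "InstanceType", "ParameterValue": "ml.g5.2xlarge"},
        {"ParameterKey": "Volume", "ParameterValue": "200"},
    ],
    Capabilities=["CAPABILITY_IAM"],  # required because the template creates an IAM role
)
# Once creation completes, the ComfyUI URL is listed in the stack outputs.
```
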
17 | ## Installation (Manually)
18 |
19 | 1. Clone this repository to your ComfyUI `custom_nodes` directory:
20 |
21 | ```bash
22 | cd ComfyUI/custom_nodes
23 | git clone https://github.com/aws-samples/comfyui-llm-node-for-amazon-bedrock.git
24 | pip install -r comfyui-llm-node-for-amazon-bedrock/requirements.txt
25 |
26 | # better to work with some third-party nodes
27 | git clone https://github.com/WASasquatch/was-node-suite-comfyui.git
28 | git clone https://github.com/pythongosssss/ComfyUI-Custom-Scripts.git
29 | ```
30 |
31 | 2. Make sure your access to the required Bedrock models is granted. Go to the AWS console at [*https://console.aws.amazon.com/bedrock/home#/modelaccess*](https://console.aws.amazon.com/bedrock/home#/modelaccess) and check the models shown in the figures below.
32 |
33 | 
34 | 
35 |
36 |
37 | 3. Configure credentials for your environment with either an IAM role or an access key pair (AKSK).
38 |
39 | - IAM Role
40 |
41 | If you are running ComfyUI on an AWS instance, you can use an IAM role to control access to the Bedrock service without any AKSK configuration.
42 |
43 | Open the IAM console for the role of your running instance, and attach the `AmazonBedrockFullAccess` policy to it.
44 |
45 | Alternatively, you can add an inline policy to your role like this:
46 |
47 | ```json
48 | {
49 | "Version": "2012-10-17",
50 | "Statement": [
51 | {
52 | "Effect": "Allow",
53 | "Action": "bedrock:*",
54 | "Resource": "*"
55 | }
56 | ]
57 | }
58 | ```
59 |
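To attach that inline policy programmatically, a minimal boto3 sketch (the role and policy names below are placeholders):

```python
import json

import boto3

policy = {
    "Version": "2012-10-17",
    "Statement": [{"Effect": "Allow", "Action": "bedrock:*", "Resource": "*"}],
}

iam = boto3.client("iam")
iam.put_role_policy(
    RoleName="YourInstanceRole",  # placeholder: the role attached to your instance
    PolicyName="BedrockInvokeInline",  # placeholder policy name
    PolicyDocument=json.dumps(policy),
)
```
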
60 | - AKSK (access key / secret key)
61 |
62 | Make sure the user behind the AKSK has the same policy as the IAM role described above. You can use the AWS CLI to configure your credentials file:
63 |
64 | ```
65 | aws configure
66 | ```
67 |
68 | Alternatively, you can create the credentials file yourself. By default, its location is ~/.aws/credentials. At a minimum, the credentials file should specify the access key and secret access key. In this example, the key and secret key for the account are specified in the default profile:
69 |
70 | ```
71 | [default]
72 | aws_access_key_id = YOUR_ACCESS_KEY
73 | aws_secret_access_key = YOUR_SECRET_KEY
74 | ```
75 |
76 | You may also want to add a default region to the AWS configuration file, which is located by default at ~/.aws/config:
77 |
78 | ```
79 | [default]
80 | region=us-east-1
81 | ```
82 |
83 | If you haven't set a default region and are running on an AWS instance, these nodes will automatically use the same region as the running instance.
84 |
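To check which credentials and region your environment resolves to, a quick sketch:

```python
import boto3

session = boto3.Session()
# None means no default region is configured; on an AWS instance these
# nodes then fall back to the instance's own region.
print("Region:", session.region_name)

# Fails if credentials are missing or invalid.
print("Identity:", session.client("sts").get_caller_identity()["Arn"])
```
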
85 | ## Example
86 |
87 | Workflow examples are in `./workflows`. To import a workflow, click "Load" in the ComfyUI interface, browse to the workflows directory, and choose the one you want to experiment with.
88 |
89 | ### Text to image with prompt translation and refinement
90 | Automatically refine the text prompt to generate high quality images.
91 |
92 | Download [this workflow file](workflows/text2img_with_prompt_refinement.json) and load it in ComfyUI.
93 |
94 | You can use a Bedrock LLM to refine and translate the prompt, then invoke an image generation model (e.g., SDXL, Titan Image) provided by Bedrock.
95 | With this prompt preprocessing, the result is much better than that of the original SDXL model (the bottom output in the figure), which cannot understand Chinese.
96 |
97 | 
98 |
99 | ### Image Caption with Claude 3
100 |
101 | Generate captions of a provided image.
102 |
103 | Download [this workflow file](workflows/claude3_image_caption.json) and load it in ComfyUI.
104 |
105 | This workflow uses Bedrock Claude 3's multimodal capability to caption images.
106 |
107 | 
108 |
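Independent of the node internals, the underlying Bedrock call for multimodal captioning looks roughly like this sketch (using the Anthropic messages format; the model ID and prompt are examples):

```python
import base64
import json

import boto3

bedrock = boto3.client("bedrock-runtime")
with open("assets/cat.png", "rb") as f:
    image_b64 = base64.b64encode(f.read()).decode("utf-8")

body = {
    "anthropic_version": "bedrock-2023-05-31",
    "max_tokens": 512,
    "messages": [{
        "role": "user",
        "content": [
            {"type": "image", "source": {"type": "base64", "media_type": "image/png", "data": image_b64}},
            {"type": "text", "text": "Describe this image in detail."},
        ],
    }],
}
response = bedrock.invoke_model(
    modelId="anthropic.claude-3-sonnet-20240229-v1:0",
    body=json.dumps(body),
)
print(json.loads(response["body"].read())["content"][0]["text"])
```
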
109 | ### Inpainting with natural language
110 | Use natural language to describe an item in the image and replace it.
111 |
112 | Download [this workflow file](workflows/inpainting_with_natural_language.json) and load it in ComfyUI.
113 |
114 | This workflow leverages Claude 3 to extract the replacement information from the prompt. It then uses Bedrock Titan Image to detect the target object from text and perform inpainting in a single step.
115 |
116 | 
117 |
118 | ### Generate Image Variation
119 | Use natural language to generate variation of an image.
120 |
121 | Download [this workflow file](workflows/generate_image_variation.json) and load it in ComfyUI.
122 |
123 | This workflow begins by using Bedrock Claude 3 to refine the image editing prompt. It then uses Bedrock Titan Image's variation feature to generate similar images based on the refined prompt.
124 |
125 | 
126 |
127 |
128 | ### Generate Image Variation with Image Caption
129 | Use natural language to generate variation of an image without re-describing the original image content.
130 |
131 | Download [this workflow file](workflows/variation_with_caption.json) and load it in ComfyUI.
132 |
133 | This workflow begins by using Bedrock Claude 3 to refine the image editing prompt, generate a caption of the original image, and merge the two image descriptions into one. It then uses Bedrock Titan Image's variation feature to generate similar images based on the refined prompt.
134 |
135 | 
136 |
137 | ### Nova Canvas Examples
138 | Generate stunning images using Amazon's Nova Canvas model, supporting the following capabilities:
139 | - text-to-image, including color-palette support and a reference image for style guidance
140 | - image-to-image for generating variations
141 | - image-to-image for background replacement
142 |
143 | You'll find all nodes in [this workflow file](workflows/amazon_nova_canvas_nodes.json), or download the PNG below and drag and drop it into ComfyUI.
144 |
145 | This workflow showcases all available Amazon Nova Canvas nodes; a sketch of the underlying API call follows the figure.
146 | 
147 |
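For reference, a minimal sketch of a Nova Canvas text-to-image call (this mirrors the public request schema; the prompt and sizes are example values):

```python
import base64
import json
from io import BytesIO

import boto3
from PIL import Image

bedrock = boto3.client("bedrock-runtime")
body = {
    "taskType": "TEXT_IMAGE",
    "textToImageParams": {"text": "a cat in a space suit, studio lighting"},
    "imageGenerationConfig": {"numberOfImages": 1, "width": 1024, "height": 1024, "seed": 0},
}
response = bedrock.invoke_model(modelId="amazon.nova-canvas-v1:0", body=json.dumps(body))
image_b64 = json.loads(response["body"].read())["images"][0]
Image.open(BytesIO(base64.b64decode(image_b64))).save("nova_canvas_output.png")
```
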
148 |
149 | ### Text to Video with Amazon Nova Reel
150 | Generate engaging videos using Amazon's Nova Reel model, supporting both text-to-video and image-to-video generation.
151 |
152 | Text-to-Video Workflow: Download [this workflow file](workflows/text2vid_nova_reel.json) or the PNG below and load it in ComfyUI.
153 | Image-to-Video Workflow: Download [this workflow file](workflows/img2vid_nova_reel.json) or the PNG below and load it in ComfyUI.
154 |
155 | These workflows showcase Amazon Nova Reel's capability to transform text descriptions or images into dynamic video content.
156 |
157 | 
158 | 
159 |
160 | The workflow combines:
161 | - Nova Reel's text-to-video and image-to-video generation
162 | - A fixed output dimension of 1280x720
163 | - Controls for the seed, with a control_after_generate option
164 | - Support for configuring an S3 destination bucket (see the request sketch below)
165 |
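Under the hood, the node starts an asynchronous invocation with a request like this sketch (mirroring `nodes/bedrock_nova_video.py`; the bucket name is a placeholder):

```python
import boto3

bedrock = boto3.client("bedrock-runtime")
invocation = bedrock.start_async_invoke(
    modelId="amazon.nova-reel-v1:0",
    modelInput={
        "taskType": "TEXT_VIDEO",
        "textToVideoParams": {"text": "a drone shot over a misty forest at sunrise"},
        "videoGenerationConfig": {
            "durationSeconds": 6,  # currently the only supported value
            "fps": 24,  # currently the only supported value
            "dimension": "1280x720",
            "seed": 0,
        },
    },
    outputDataConfig={"s3OutputDataConfig": {"s3Uri": "s3://your-destination-bucket"}},  # placeholder bucket
)
print(invocation["invocationArn"])  # poll this ARN with get_async_invoke until "Completed"
```
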
166 | Example output:
167 | https://github.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/tree/main/assets/text2vid_nova_reel_output_example.mp4
168 | https://github.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/tree/main/assets/img2vid_nova_reel_output_example.mp4
169 |
170 |
171 | ### Text to Video with Luma Ray
172 | Generate high-quality videos from text descriptions using Luma AI's Ray model.
173 |
174 | Download [this workflow file](workflows/text2vid_luma_ray2.json) or the PNG below and load it in ComfyUI.
175 |
176 | This workflow demonstrates how to use Luma AI's Ray model through Bedrock to create dynamic videos from text prompts.
177 |
178 | 
179 |
180 | The workflow combines:
181 | - Luma Ray's advanced text-to-video capabilities
182 | - Options to control the aspect ratio, resolution, video duration, destination bucket, and looping (see the polling sketch below)
183 |
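The Luma node uses the same asynchronous pattern; a condensed sketch of the polling loop it runs after `start_async_invoke` (see `nodes/bedrock_luma_ray_video.py` for the full version):

```python
import time

import boto3

bedrock = boto3.client("bedrock-runtime", region_name="us-west-2")  # the node invokes Luma Ray 2 in us-west-2

def wait_for_video(invocation_arn, poll_seconds=15):
    """Poll an async Bedrock invocation until it finishes, returning its S3 output URI."""
    while True:
        job = bedrock.get_async_invoke(invocationArn=invocation_arn)
        if job["status"] == "Completed":
            return job["outputDataConfig"]["s3OutputDataConfig"]["s3Uri"]
        if job["status"] == "Failed":
            raise RuntimeError(f"Job failed: {job}")
        time.sleep(poll_seconds)
```
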
184 | Example output:
185 | https://github.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/tree/main/assets/text2vid_luma_output_example.mp4
186 |
187 | ### Stability AI Models Nodes
188 | Generate stunning images using Stability AI's most recent models, supporting the following capabilities:
189 | - text-to-image
190 |   - SD3 Large
191 |   - SD3.5 Large
192 |   - Stable Image Ultra
193 |   - Stable Image Core
194 | - image-to-image for generating variations
195 |   - SD3 & SD3.5 Large
196 |
197 | You'll find all nodes in [this workflow file](workflows/amazon_stablity_ai.json), or download the PNG below and drag and drop it into ComfyUI.
198 |
199 | This workflow showcases all available Stability AI nodes; a request sketch follows the figure.
200 | 
201 |
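These nodes call `invoke_model` with a JSON body; a minimal text-to-image sketch mirroring `nodes/bedrock_stability_image.py` (the prompt is an example):

```python
import base64
import json
from io import BytesIO

import boto3
from PIL import Image

bedrock = boto3.client("bedrock-runtime", region_name="us-west-2")
body = {
    "mode": "text-to-image",
    "prompt": "a lighthouse on a stormy coast, dramatic light",
    "aspect_ratio": "1:1",
    "output_format": "png",
    "seed": 0,
}
response = bedrock.invoke_model(
    modelId="stability.stable-image-core-v1:1",
    body=json.dumps(body),
    accept="application/json",
    contentType="application/json",
)
image_b64 = json.loads(response["body"].read())["images"][0]
Image.open(BytesIO(base64.b64decode(image_b64))).save("stability_output.png")
```
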
202 | ## Supported models
203 |
204 | Here are the models ready for use; more models are coming soon.
205 |
206 | - Luma:
207 |   - [X] Ray2
208 |
209 | - Anthropic:
210 |   - [X] Claude (1.x, 2.0, 2.1, haiku, sonnet, opus)
211 |   - [X] Claude Instant (1.x)
212 |
213 | - Amazon:
214 |   - Nova LLM
215 |     - [X] Nova Lite
216 |     - [X] Nova Pro
217 |
218 |   - Nova Canvas
219 |     - [X] text to image
220 |     - [ ] inpainting
221 |     - [ ] outpainting
222 |     - [X] image variation
223 |     - [ ] image conditioning
224 |     - [X] background replacement (with prompt)
225 |
226 |   - Nova Reel
227 |     - [X] text to video
228 |     - [X] image to video
229 |
230 |   - Titan Image Generator G1 (1.x)
231 |     - [X] text to image
232 |     - [X] inpainting
233 |     - [X] outpainting
234 |     - [X] image variation
235 |
236 |
237 | - Stability AI:
238 |   - Stable Diffusion XL (1.0)
239 |     - [X] text to image
240 |     - [ ] image to image
241 |     - [ ] image to image (masking)
242 |   - SD3 Large (1.0)
243 |     - [X] text to image
244 |     - [X] image to image
245 |   - SD3.5 Large (1.0)
246 |     - [X] text to image
247 |     - [X] image to image
248 |   - Stable Diffusion Image Core (v1.1)
249 |     - [X] text to image
250 |   - Stable Diffusion Image Ultra (v1.1)
251 |     - [X] text to image
252 |
253 | ## Security
254 |
255 | See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information.
256 |
257 | ## License
258 |
259 | This library is licensed under the MIT-0 License. See the LICENSE file.
260 |
261 |
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from pathlib import Path
3 |
4 | here = Path(__file__).parent.resolve()
5 | # sys.path.append(str(Path(here, "nodes")))
6 | import traceback
7 | import importlib
8 |
9 |
10 | def load_nodes():
11 | errors = []
12 | node_class_mappings = {}
13 | node_display_name_mappings = {}
14 |
15 | for filename in (here / "nodes").iterdir():
16 | if filename.suffix != ".py":
17 | continue
18 | module_name = filename.stem
19 | try:
20 | module = importlib.import_module(
21 | f".nodes.{module_name}", package=__package__
22 | )
23 | node_class_mappings.update(getattr(module, "NODE_CLASS_MAPPINGS"))
24 | if hasattr(module, "NODE_DISPLAY_NAME_MAPPINGS"):
25 | node_display_name_mappings.update(
26 | getattr(module, "NODE_DISPLAY_NAME_MAPPINGS")
27 | )
28 |
29 | except AttributeError:
30 | pass # wip nodes
31 | except Exception:
32 | error_message = traceback.format_exc().splitlines()[-1]
33 | errors.append(
34 | f"Failed to import module {module_name} because {error_message}"
35 | )
36 |
37 | if len(errors) > 0:
38 | print(
39 | "Some nodes failed to load:\n\t"
40 | + "\n\t".join(errors)
41 | + "\n\n"
42 | + "Check that you properly installed the dependencies.\n"
43 | + "If you think this is a bug, please report it on the github page (https://github.com/Fannovel16/comfyui_controlnet_aux/issues)"
44 | )
45 | return node_class_mappings, node_display_name_mappings
46 |
47 |
48 | NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS = load_nodes()
49 | print("ComfyUI-Bedrock loaded:\n ", list(NODE_CLASS_MAPPINGS.keys()))
50 |
--------------------------------------------------------------------------------
/assets/base_models_us-east-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/base_models_us-east-1.png
--------------------------------------------------------------------------------
/assets/base_models_us-east-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/base_models_us-east-2.png
--------------------------------------------------------------------------------
/assets/cat.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/cat.png
--------------------------------------------------------------------------------
/assets/comfyui_on_sagemaker.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: 2010-09-09
2 | Description: Sagemaker Jupyter Notebook for ComfyUI
3 | Parameters:
4 | InstanceType:
5 | Description: SageMaker EC2 instance type
6 | Type: String
7 | Default: ml.g5.2xlarge
8 | AllowedValues:
9 | - ml.g5.2xlarge
10 | - ml.g5.4xlarge
11 | - ml.g5.8xlarge
12 | - ml.g6.2xlarge
13 | - ml.g6.4xlarge
14 | - ml.g6.8xlarge
15 | ConstraintDescription: Must be a valid EC2 instance type
16 | Volume:
17 | Type: String
18 | Description: Volume size in GB
19 | Default: 200
20 |
21 | Resources:
22 | ExecutionRole:
23 | Type: AWS::IAM::Role
24 | Properties:
25 | AssumeRolePolicyDocument:
26 | Version: '2012-10-17'
27 | Statement:
28 | - Effect: Allow
29 | Principal:
30 | Service:
31 | - sagemaker.amazonaws.com
32 | Action:
33 | - sts:AssumeRole
34 | Path: /
35 | ManagedPolicyArns:
36 | - arn:aws:iam::aws:policy/AmazonSageMakerFullAccess
37 | - arn:aws:iam::aws:policy/AmazonBedrockFullAccess
38 | NotebookInstance:
39 | Type: AWS::SageMaker::NotebookInstance
40 | Properties:
41 | InstanceType: !Ref InstanceType
42 | LifecycleConfigName: !GetAtt LifeCycleConfig.NotebookInstanceLifecycleConfigName
43 | NotebookInstanceName: !Ref AWS::StackName
44 | RoleArn: !GetAtt ExecutionRole.Arn
45 | KmsKeyId: !GetAtt KMSKey.KeyId
46 | DirectInternetAccess: Disabled
47 | SubnetId: !Ref PrivateSubnet1
48 | SecurityGroupIds:
49 | - !Ref PrivateSecurityGroup
50 | VolumeSizeInGB: !Ref Volume
51 | Tags:
52 | - Key: Managed
53 | Value: cloudformation
54 |
55 | LifeCycleConfig:
56 | Type: AWS::SageMaker::NotebookInstanceLifecycleConfig
57 | Properties:
58 | OnCreate:
59 | - Content: !Base64
60 | Fn::Sub: |
61 | #!/bin/bash
62 | set -ex
63 | sudo -u ec2-user -i << EOF
64 | cat > ~/SageMaker/start_comfyui.sh << EOF0
65 | #!/bin/bash
66 | # install comfyui
67 | cd ~/SageMaker
68 | git clone https://github.com/comfyanonymous/ComfyUI.git --branch v0.0.8
69 | cd ComfyUI/custom_nodes/
70 | git clone https://github.com/ltdrdata/ComfyUI-Manager.git
71 | git clone https://github.com/aws-samples/comfyui-llm-node-for-amazon-bedrock.git
72 | git clone https://github.com/WASasquatch/was-node-suite-comfyui.git
73 | git clone https://github.com/pythongosssss/ComfyUI-Custom-Scripts.git
74 | cd ~/SageMaker/ComfyUI
75 | source activate pytorch_p310
76 | python3 -mpip install -r requirements.txt
77 | cd ~/SageMaker/ComfyUI/custom_nodes/comfyui-llm-node-for-amazon-bedrock
78 | python3 -mpip install -r requirements.txt
79 | # start comfyui in background
80 | cd ~/SageMaker/ComfyUI
81 | source activate pytorch_p310
82 | python3 main.py
83 | EOF0
84 | chmod +x ~/SageMaker/start_comfyui.sh
85 | EOF
86 | OnStart:
87 | - Content: !Base64
88 | Fn::Sub: |
89 | #!/bin/bash
90 | set -ex
91 | sudo -u ec2-user -i << EOF
92 | screen -dmS comfyui ~/SageMaker/start_comfyui.sh
93 | EOF
94 | KMSKey:
95 | Type: AWS::KMS::Key
96 | Properties:
97 | EnableKeyRotation: true
98 | VPC:
99 | Type: AWS::EC2::VPC
100 | Properties:
101 | CidrBlock: 10.0.0.0/16
102 | EnableDnsHostnames: true
103 | EnableDnsSupport: true
104 | InstanceTenancy: default
105 | Tags:
106 | - Key: Name
107 | Value: !Ref AWS::StackName
108 |
109 | InternetGateway:
110 | Type: AWS::EC2::InternetGateway
111 | Properties:
112 | Tags:
113 | - Key: Name
114 | Value: !Ref AWS::StackName
115 |
116 | InternetGatewayAttachment:
117 | Type: AWS::EC2::VPCGatewayAttachment
118 | Properties:
119 | InternetGatewayId: !Ref InternetGateway
120 | VpcId: !Ref VPC
121 |
122 | PublicSubnet1:
123 | Type: AWS::EC2::Subnet
124 | Properties:
125 | VpcId: !Ref VPC
126 | AvailabilityZone: !Select
127 | - 0
128 | - !GetAZs ''
129 | CidrBlock: 10.0.1.0/24
130 | MapPublicIpOnLaunch: false
131 | Tags:
132 | - Key: Name
133 | Value: Public Subnet 1
134 |
135 | PrivateSubnet1:
136 | Type: AWS::EC2::Subnet
137 | Properties:
138 | VpcId: !Ref VPC
139 | AvailabilityZone: !Select
140 | - 0
141 | - !GetAZs ''
142 | CidrBlock: 10.0.2.0/24
143 | MapPublicIpOnLaunch: false
144 | Tags:
145 | - Key: Name
146 | Value: !Ref AWS::StackName
147 |
148 | PublicRouteTable:
149 | Type: AWS::EC2::RouteTable
150 | Properties:
151 | VpcId: !Ref VPC
152 | Tags:
153 | - Key: Name
154 | Value: !Ref AWS::StackName
155 |
156 | PublicRoute0:
157 | Type: AWS::EC2::Route
158 | DependsOn: InternetGatewayAttachment
159 | Properties:
160 | RouteTableId: !Ref PublicRouteTable
161 | DestinationCidrBlock: 0.0.0.0/1
162 | GatewayId: !Ref InternetGateway
163 |
164 | PublicRoute1:
165 | Type: AWS::EC2::Route
166 | DependsOn: InternetGatewayAttachment
167 | Properties:
168 | RouteTableId: !Ref PublicRouteTable
169 | DestinationCidrBlock: 128.0.0.0/1
170 | GatewayId: !Ref InternetGateway
171 |
172 | PublicSubnet1RouteTableAssociation:
173 | Type: AWS::EC2::SubnetRouteTableAssociation
174 | Properties:
175 | RouteTableId: !Ref PublicRouteTable
176 | SubnetId: !Ref PublicSubnet1
177 |
178 | NatGatewayEIP:
179 | Type: AWS::EC2::EIP
180 | DependsOn: InternetGatewayAttachment
181 | Properties:
182 | Domain: vpc
183 |
184 | NatGateway:
185 | Type: AWS::EC2::NatGateway
186 | Properties:
187 | AllocationId: !GetAtt NatGatewayEIP.AllocationId
188 | SubnetId: !Ref PublicSubnet1
189 | Tags:
190 | - Key: Name
191 | Value: !Ref AWS::StackName
192 |
193 | PrivateRouteTable:
194 | Type: AWS::EC2::RouteTable
195 | Properties:
196 | VpcId: !Ref VPC
197 | Tags:
198 | - Key: Name
199 | Value: !Ref AWS::StackName
200 |
201 | PrivateRoute:
202 | Type: AWS::EC2::Route
203 | Properties:
204 | RouteTableId: !Ref PrivateRouteTable
205 | DestinationCidrBlock: 0.0.0.0/0
206 | NatGatewayId: !Ref NatGateway
207 |
208 | PrivateSubnet1RouteTableAssociation:
209 | Type: AWS::EC2::SubnetRouteTableAssociation
210 | Properties:
211 | RouteTableId: !Ref PrivateRouteTable
212 | SubnetId: !Ref PrivateSubnet1
213 |
214 | PrivateSecurityGroup:
215 | Type: AWS::EC2::SecurityGroup
216 | Properties:
217 | GroupDescription: Security group for private subnet
218 | VpcId: !Ref VPC
219 | SecurityGroupEgress:
220 | - IpProtocol: tcp
221 | Description: A
222 | FromPort: 80
223 | ToPort: 80
224 | CidrIp: 0.0.0.0/1
225 | - IpProtocol: tcp
226 | Description: A
227 | FromPort: 443
228 | ToPort: 443
229 | CidrIp: 0.0.0.0/1
230 | - IpProtocol: tcp
231 | Description: BCDE
232 | FromPort: 80
233 | ToPort: 80
234 | CidrIp: 128.0.0.0/1
235 | - IpProtocol: tcp
236 | Description: BCDE
237 | FromPort: 443
238 | ToPort: 443
239 | CidrIp: 128.0.0.0/1
240 | Tags:
241 | - Key: Name
242 | Value: !Ref AWS::StackName
243 |
244 | Outputs:
245 | ComfyUIURL:
246 | Description: ComfyUI URL
247 | Value: !Join
248 | - ''
249 | - - https://
250 | - !GetAtt NotebookInstance.NotebookInstanceName
251 | - .notebook.
252 | - !Ref AWS::Region
253 | - .sagemaker.aws/proxy/8188/
254 | NotebookURL:
255 | Description: SageMaker Notebook Instance URL.
256 | Value: !Join
257 | - ''
258 | - - https://
259 | - !Ref AWS::Region
260 | - .console.aws.amazon.com/sagemaker/home?region=
261 | - !Ref AWS::Region
262 | - '#/notebook-instances/openNotebook/'
263 | - !GetAtt NotebookInstance.NotebookInstanceName
264 | - '?view=lab'
265 |
--------------------------------------------------------------------------------
/assets/example_claude3_multimodal.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/example_claude3_multimodal.webp
--------------------------------------------------------------------------------
/assets/example_generate_image_variation.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/example_generate_image_variation.webp
--------------------------------------------------------------------------------
/assets/example_inpainting_with_natural_language.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/example_inpainting_with_natural_language.webp
--------------------------------------------------------------------------------
/assets/example_prompts_refine.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/example_prompts_refine.webp
--------------------------------------------------------------------------------
/assets/example_variation_with_caption.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/example_variation_with_caption.webp
--------------------------------------------------------------------------------
/assets/flying_duck.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/flying_duck.png
--------------------------------------------------------------------------------
/assets/img2vid_nova_reel.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/img2vid_nova_reel.png
--------------------------------------------------------------------------------
/assets/img2vid_nova_reel_output_example.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/img2vid_nova_reel_output_example.mp4
--------------------------------------------------------------------------------
/assets/man_in_armor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/man_in_armor.png
--------------------------------------------------------------------------------
/assets/model_access.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/model_access.webp
--------------------------------------------------------------------------------
/assets/nova_canvas_workflows.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/nova_canvas_workflows.png
--------------------------------------------------------------------------------
/assets/nova_video_output.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/nova_video_output.png
--------------------------------------------------------------------------------
/assets/stability_ai_workflows.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/stability_ai_workflows.png
--------------------------------------------------------------------------------
/assets/stack_complete.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/stack_complete.webp
--------------------------------------------------------------------------------
/assets/text2vid_luma_output_example.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/text2vid_luma_output_example.mp4
--------------------------------------------------------------------------------
/assets/text2vid_luma_ray.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/text2vid_luma_ray.png
--------------------------------------------------------------------------------
/assets/text2vid_nova_reel.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/text2vid_nova_reel.png
--------------------------------------------------------------------------------
/assets/text2vid_nova_reel_output_example.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/assets/text2vid_nova_reel_output_example.mp4
--------------------------------------------------------------------------------
/images/edit12.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/images/edit12.png
--------------------------------------------------------------------------------
/images/edit7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/images/edit7.png
--------------------------------------------------------------------------------
/images/gen9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/images/gen9.png
--------------------------------------------------------------------------------
/images/ref7.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/b05443acaf45d496a2e8865c950dd84150ec3ffb/images/ref7.jpg
--------------------------------------------------------------------------------
/nodes/bedrock_luma_ray_video.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | from io import BytesIO
4 | from datetime import datetime
5 | import boto3
6 | from PIL import Image
7 | import numpy as np
8 | import time
9 |
10 | MAX_RETRY = 3
11 |
12 |
13 | def get_default_region():
14 | return "us-west-2" # Use us-west-2 for Luma AI Ray 2
15 |
16 |
17 | s3_client = boto3.client("s3", region_name=get_default_region())
18 |
19 |
20 | def get_account_id():
21 | sts_client = boto3.client("sts", region_name=get_default_region())
22 | return sts_client.get_caller_identity().get("Account")
23 |
24 |
25 | def is_video_downloaded_for_invocation_job(invocation_job, output_folder="output"):
26 | invocation_arn = invocation_job["invocationArn"]
27 | invocation_id = invocation_arn.split("/")[-1]
28 | folder_name = get_folder_name_for_job(invocation_job)
29 | output_folder = os.path.abspath(f"{output_folder}/{folder_name}")
30 | file_name = f"{invocation_id}.mp4"
31 | local_file_path = os.path.join(output_folder, file_name)
32 | return os.path.exists(local_file_path)
33 |
34 |
35 | def get_folder_name_for_job(invocation_job):
36 | invocation_arn = invocation_job["invocationArn"]
37 | invocation_id = invocation_arn.split("/")[-1]
38 | submit_time = invocation_job["submitTime"]
39 | timestamp = submit_time.astimezone().strftime("%Y-%m-%d_%H-%M-%S")
40 | folder_name = f"{timestamp}_{invocation_id}"
41 | return folder_name
42 |
43 |
44 | def download_video_for_invocation_arn(invocation_arn, bucket_name, destination_folder):
45 | invocation_id = invocation_arn.split("/")[-1]
46 | file_name = f"{invocation_id}.mp4"
47 | output_folder = os.path.abspath(destination_folder)
48 | local_file_path = os.path.join(output_folder, file_name)
49 | os.makedirs(output_folder, exist_ok=True)
50 | s3 = boto3.client("s3", region_name=get_default_region())
51 | response = s3.list_objects_v2(Bucket=bucket_name, Prefix=invocation_id)
52 | for obj in response.get("Contents", []):
53 | object_key = obj["Key"]
54 | if object_key.endswith(".mp4"):
55 | print(f'Downloading "{object_key}"...')
56 | s3.download_file(bucket_name, object_key, local_file_path)
57 | print(f"Downloaded to {local_file_path}")
58 | return local_file_path
59 | print(f"Problem: No MP4 file was found in S3 at {bucket_name}/{invocation_id}")
60 |
61 |
62 | def get_job_id_from_arn(invocation_arn):
63 | return invocation_arn.split("/")[-1]
64 |
65 |
66 | def save_completed_job(job, output_folder="output"):
67 | job_id = get_job_id_from_arn(job["invocationArn"])
68 | output_folder_abs = os.path.abspath(
69 | f"{output_folder}/{get_folder_name_for_job(job)}"
70 | )
71 | os.makedirs(output_folder_abs, exist_ok=True)
72 | if is_video_downloaded_for_invocation_job(job, output_folder=output_folder):
73 | print(f"Skipping completed job {job_id}, video already downloaded.")
74 | return
75 | s3_bucket_name = (
76 | job["outputDataConfig"]["s3OutputDataConfig"]["s3Uri"]
77 | .split("//")[1]
78 | .split("/")[0]
79 | )
80 | localPath = download_video_for_invocation_arn(
81 | job["invocationArn"], s3_bucket_name, output_folder_abs
82 | )
83 | return localPath
84 |
85 |
86 | bedrock_runtime_client = boto3.client(
87 | "bedrock-runtime", region_name=get_default_region()
88 | )
89 | region = get_default_region()
90 | account_id = get_account_id()
91 |
92 |
93 | class BedrockLumaVideo:
94 | @classmethod
95 | def INPUT_TYPES(s):
96 | return {
97 | "required": {
98 | "prompt": ("STRING", {"multiline": True}),
99 | "aspect_ratio": (
100 | ["16:9", "1:1", "3:4", "4:3", "9:16", "21:9", "9:21"],
101 | ),
102 | "resolution": (["540p", "720p"],),
103 | "duration": (["5s", "9s"],),
104 | "destination_bucket": (
105 | [b["Name"] for b in s3_client.list_buckets()["Buckets"]],
106 | ),
107 | "loop_video": (["False", "True"],),
108 | },
109 | }
110 |
111 | RETURN_TYPES = ("STRING",)
112 | FUNCTION = "forward"
113 | CATEGORY = "aws"
114 |
115 | def forward(self, **kwargs):
116 | prompt = kwargs.get("prompt")
117 | aspect_ratio = kwargs.get("aspect_ratio")
118 | resolution = kwargs.get("resolution")
119 | duration = kwargs.get("duration")
120 | loop_video = kwargs.get("loop_video")
121 | s3_destination_bucket = kwargs.get("destination_bucket")
122 |
123 | model_input_body = {
124 | "prompt": prompt,
125 | "aspect_ratio": aspect_ratio,
126 | "resolution": resolution,
127 | "loop": loop_video == "True",  # the dropdown yields a string; the API expects a boolean
128 | "duration": duration,
129 | }
130 |
131 | # Start asynchronous invocation
132 | invocation_response = bedrock_runtime_client.start_async_invoke(
133 | modelId="luma.ray-v2:0",
134 | modelInput=model_input_body,
135 | outputDataConfig={
136 | "s3OutputDataConfig": {"s3Uri": f"s3://{s3_destination_bucket}"}
137 | },
138 | )
139 |
140 | invocation_arn = invocation_response["invocationArn"]
141 | print("\nInvocation Response:")
142 | print(json.dumps(invocation_response, indent=2, default=str))
143 |
144 | # Poll for job completion
145 | save_local_path = ""
146 | while True:
147 | job_update_response = bedrock_runtime_client.get_async_invoke(
148 | invocationArn=invocation_arn
149 | )
150 | status = job_update_response["status"]
151 |
152 | if status == "Completed":
153 | save_local_path = save_completed_job(
154 | job_update_response,
155 | output_folder=os.path.expanduser("~/ComfyUI/output/"),
156 | )
157 | break
158 | elif status == "Failed":
159 | print(f"Job failed: {job_update_response}")
160 | raise Exception(f"Job failed with details: {job_update_response}")
161 | else:
162 | time.sleep(15)
163 |
164 | return (save_local_path,)
165 |
166 | NODE_CLASS_MAPPINGS = {
167 | "Amazon Bedrock - Luma AI Ray Video": BedrockLumaVideo,
168 | }
169 |
--------------------------------------------------------------------------------
/nodes/bedrock_nova_video.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import base64
4 | from io import BytesIO
5 | from random import randint
6 | from datetime import datetime
7 | from retry import retry
8 | from PIL import Image
9 | import numpy as np
10 | from .session import get_client
11 | import time
12 | import boto3
13 |
14 | MAX_RETRY = 3
15 | def get_default_region():
16 | session = boto3.Session()
17 | return session.region_name
18 |
19 | def get_account_id():
20 | sts_client = boto3.client('sts')
21 | return sts_client.get_caller_identity().get('Account')
22 |
23 | s3_client = boto3.client("s3", region_name=get_default_region())
24 | bedrock_runtime_client = get_client(service_name="bedrock-runtime")
25 | region = get_default_region()
26 | account_id = get_account_id()
27 |
28 | def is_video_downloaded_for_invocation_job(invocation_job, output_folder="output"):
29 | """
30 | This function checks if the video file for the given invocation job has been downloaded.
31 | """
32 | invocation_arn = invocation_job["invocationArn"]
33 | invocation_id = invocation_arn.split("/")[-1]
34 | folder_name = get_folder_name_for_job(invocation_job)
35 | output_folder = os.path.abspath(f"{output_folder}/{folder_name}")
36 | file_name = f"{invocation_id}.mp4"
37 | local_file_path = os.path.join(output_folder, file_name)
38 | return os.path.exists(local_file_path)
39 |
40 | def get_folder_name_for_job(invocation_job):
41 | invocation_arn = invocation_job["invocationArn"]
42 | invocation_id = invocation_arn.split("/")[-1]
43 | submit_time = invocation_job["submitTime"]
44 | timestamp = submit_time.astimezone().strftime("%Y-%m-%d_%H-%M-%S")
45 | folder_name = f"{timestamp}_{invocation_id}"
46 | return folder_name
47 |
48 | def download_video_for_invocation_arn(invocation_arn, bucket_name, destination_folder):
49 | """
50 | This function downloads the video file for the given invocation ARN.
51 | """
52 | invocation_id = invocation_arn.split("/")[-1]
53 |
54 | # Create the local file path
55 | file_name = f"{invocation_id}.mp4"
57 |
58 | output_folder = os.path.abspath(destination_folder)
59 | local_file_path = os.path.join(output_folder, file_name)
60 |
61 | # Ensure the output folder exists
62 | os.makedirs(output_folder, exist_ok=True)
63 |
64 | # Create an S3 client
65 | s3 = boto3.client("s3")
66 |
67 | # List objects in the specified folder
68 | response = s3.list_objects_v2(Bucket=bucket_name, Prefix=invocation_id)
69 |
70 | # Find the first MP4 file and download it.
71 | for obj in response.get("Contents", []):
72 | object_key = obj["Key"]
73 | if object_key.endswith(".mp4"):
74 | print(f"""Downloading "{object_key}"...""")
75 | s3.download_file(bucket_name, object_key, local_file_path)
76 | print(f"Downloaded to {local_file_path}")
77 | return local_file_path
78 |
79 | # If we reach this point, no MP4 file was found.
80 | print(f"Problem: No MP4 file was found in S3 at {bucket_name}/{invocation_id}")
81 |
82 | def get_job_id_from_arn(invocation_arn):
83 | return invocation_arn.split("/")[-1]
84 |
85 | def save_completed_job(job, output_folder="output"):
86 | job_id = get_job_id_from_arn(job["invocationArn"])
87 |
88 | output_folder_abs = os.path.abspath(
89 | f"{output_folder}/{get_folder_name_for_job(job)}"
90 | )
91 |
92 | # Ensure the output folder exists
93 | os.makedirs(output_folder_abs, exist_ok=True)
94 |
95 | status_file = os.path.join(output_folder_abs, "completed.json")
96 |
97 | if is_video_downloaded_for_invocation_job(job, output_folder=output_folder):
98 | print(f"Skipping completed job {job_id}, video already downloaded.")
99 | return
100 |
101 | s3_bucket_name = (
102 | job["outputDataConfig"]["s3OutputDataConfig"]["s3Uri"]
103 | .split("//")[1]
104 | .split("/")[0]
105 | )
106 |
107 | localPath = download_video_for_invocation_arn(
108 | job["invocationArn"], s3_bucket_name, output_folder_abs
109 | )
110 | return localPath
111 |
112 | class BedrockNovaVideo:
113 | @classmethod
114 | def INPUT_TYPES(s):
115 | return {
116 | "required": {
117 | "prompt": ("STRING", {"multiline": True}),
118 | "dimension": (
119 | [
120 | "1280x720"
121 | ],
122 | ),
123 | "seed": (
124 | "INT",
125 | {
126 | "default": 0,
127 | "min": 0,
128 | "max": 2147483646,
129 | "step": 1,
130 | "round": 1, # The value represeting the precision to round to, will be set to the step value by default. Can be set to False to disable rounding.
131 | "display": "number",
132 | },
133 | ),
134 | "destination_bucket": (
135 | [b["Name"] for b in s3_client.list_buckets()["Buckets"]],
136 | ),
137 | },
138 | "optional": {
139 | "image": ("IMAGE",),
140 | },
141 | }
142 |
143 | RETURN_TYPES = ("STRING",)
144 | FUNCTION = "forward"
145 | CATEGORY = "aws"
146 |
147 | @retry(tries=MAX_RETRY)
148 | def forward(self, **kwargs):
149 | prompt = kwargs.get('prompt')
150 | dimension = kwargs.get('dimension')
151 | seed = kwargs.get('seed')
152 | image = kwargs.get('image')
153 | s3_destination_bucket = kwargs.get("destination_bucket")
154 |
155 |
156 | input_image_base64=None
157 | if image is not None:
158 | image = image[0] * 255.0
159 | image = Image.fromarray(image.clamp(0, 255).numpy().round().astype(np.uint8))
160 | buffer = BytesIO()
161 | image.save(buffer, format="PNG")
162 |
163 | image_data = buffer.getvalue()
164 | input_image_base64 = base64.b64encode(image_data).decode("utf-8")
165 |
166 | textToVideoParams={ "text": prompt}
167 | if input_image_base64 is not None:
168 | textToVideoParams["images"] = [
169 | {
170 | "format": "png", # May be "png" or "jpeg"
171 | "source": {
172 | "bytes": input_image_base64
173 | }
174 | }
175 | ]
176 | model_input = {
177 | "taskType": "TEXT_VIDEO",
178 | "textToVideoParams": textToVideoParams,
179 | "videoGenerationConfig": {
180 | "durationSeconds": 6, # 6 is the only supported value currently.
181 | "fps": 24, # 24 is the only supported value currently.
182 | "dimension": dimension, # "1280x720" is the only supported value currently.
183 | "seed": seed # A random seed guarantees we'll get a different result each time this code runs.
184 | },
185 | }
186 |
187 |
188 | invocation = bedrock_runtime_client.start_async_invoke(
189 | modelId="amazon.nova-reel-v1:0",
190 | modelInput=model_input,
191 | outputDataConfig={"s3OutputDataConfig": {"s3Uri": f"s3://{s3_destination_bucket}"}},
192 | )
193 |
194 | invocation_arn = invocation["invocationArn"]
195 | print("\nResponse:")
196 | print(json.dumps(invocation, indent=2, default=str))
197 |
198 | save_local_path = ""
199 |
200 | # Poll the invocation until it completes, fails, or times out.
201 | start_time = time.time()
202 | while True:
203 | job_update = bedrock_runtime_client.get_async_invoke(invocationArn=invocation_arn)
204 | status = job_update["status"]
205 | if status == "Completed":
206 | save_local_path = save_completed_job(job_update,
207 | output_folder=os.path.expanduser("~/ComfyUI/output/"))
208 | break
209 | elif status == "Failed":
210 | raise Exception(f"Job failed with details: {job_update}")
211 | elif time.time() - start_time > 600:
212 | print("Job timed out after 600 seconds.")
213 | break
214 | else:
215 | time.sleep(5)
216 |
217 | return (save_local_path,)
218 |
219 | NODE_CLASS_MAPPINGS = {
220 | "Amazon Bedrock - Nova Reel Video": BedrockNovaVideo,
221 | }
--------------------------------------------------------------------------------
/nodes/bedrock_stability_image.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import re
4 | import base64
5 | from io import BytesIO
6 | from retry import retry
7 | from PIL import Image
8 | import numpy as np
9 | import torch
10 | import boto3
11 | import folder_paths
12 |
13 | MAX_RETRY = 2
14 | DEBUG_MODE = False
15 |
16 | # Rely on ComfyUI Paths
17 | comfyui_root = folder_paths.base_path
18 | output_directory = f"{comfyui_root}/output"
19 | bedrock = boto3.client(service_name="bedrock-runtime", region_name="us-west-2")
20 |
21 | # Model IDs
22 | MODEL_ID_SD3_LARGE = "stability.sd3-large-v1:0"
23 | MODEL_ID_SD3_5_LARGE = "stability.sd3-5-large-v1:0"
24 | MODEL_ID_STABLE_IMAGE_CORE = "stability.stable-image-core-v1:1"
25 | MODEL_ID_STABLE_IMAGE_ULTRA = "stability.stable-image-ultra-v1:1"
26 |
27 |
28 | def encode_image(image_tensor: torch.Tensor) -> str:
29 | """Convert ComfyUI image tensor to Base64 string"""
30 | image_np = image_tensor.cpu().numpy()[0] * 255
31 | image_np = image_np.astype(np.uint8)
32 |
33 | img = Image.fromarray(image_np)
34 | buffered = BytesIO()
35 | img.save(buffered, format="PNG")
36 |
37 | return base64.b64encode(buffered.getvalue()).decode("utf-8")
38 |
39 |
40 | def parse_resolution(resolution_str: str) -> tuple:
41 | """Extract width/height from resolution string"""
42 | match = re.findall(r"\d+", resolution_str)
43 | if len(match) != 2:
44 | raise ValueError(f"Invalid resolution format: {resolution_str}")
45 | return int(match[0]), int(match[1])
46 |
47 |
48 | def resolution_to_aspect_ratio(resolution_str: str) -> str:
49 | """Convert resolution string to aspect ratio string for Stability AI models"""
50 | # Direct mapping for common resolutions
51 | aspect_ratio_map = {
52 | "1024 x 1024": "1:1",
53 | "1088 x 896": "5:4",
54 | "1152 x 896": "5:4",
55 | "1216 x 832": "16:9",
56 | "1344 x 768": "16:9",
57 | "1536 x 640": "21:9",
58 | "640 x 1536": "9:21",
59 | "768 x 1344": "9:16",
60 | "832 x 1216": "9:16",
61 | "896 x 1088": "4:5",
62 | "896 x 1152": "4:5",
63 | "512 x 512": "1:1",
64 | "512 x 640": "4:5",
65 | "640 x 512": "5:4",
66 | "768 x 768": "1:1",
67 | "1536 x 1536": "1:1",
68 | "1792 x 1344": "4:3",
69 | "1920 x 1280": "3:2",
70 | "2048 x 1024": "2:1",
71 | "1024 x 2048": "1:2",
72 | "1280 x 1920": "2:3",
73 | "1344 x 1792": "3:4",
74 | }
75 |
76 | # If resolution is in the map, return the corresponding aspect ratio
77 | if resolution_str in aspect_ratio_map:
78 | return aspect_ratio_map[resolution_str]
79 |
80 | # Otherwise, calculate it
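# Note: Bedrock's Stability models accept only a fixed set of aspect ratios
# (1:1, 16:9, 21:9, 9:21, 9:16, 2:3, 3:2, 4:5, 5:4), so the computed
# fallback below may be rejected by the API for unusual resolutions.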
81 | width, height = parse_resolution(resolution_str)
82 |
83 | # Find greatest common divisor
84 | def gcd(a, b):
85 | while b:
86 | a, b = b, a % b
87 | return a
88 |
89 | divisor = gcd(width, height)
90 | return f"{width // divisor}:{height // divisor}"
91 |
92 |
93 | @retry(tries=MAX_RETRY)
94 | def generate_images(
95 | inference_params,
96 | model_id,
97 | output_directory=output_directory,
98 | ):
99 | """Generate images using AWS Bedrock API"""
100 | os.makedirs(output_directory, exist_ok=True)
101 |
102 | # Display seed if available
103 | if "seed" in inference_params:
104 | print(f"Using seed: {inference_params['seed']}")
105 |
106 | body_json = json.dumps(inference_params, indent=2)
107 |
108 | # For debugging
109 | if DEBUG_MODE:
110 | request_file_path = os.path.join(output_directory, "request.json")
111 | with open(request_file_path, "w") as f:
112 | f.write(body_json)
113 |
114 | try:
115 | response = bedrock.invoke_model(
116 | body=body_json,
117 | modelId=model_id,
118 | accept="application/json",
119 | contentType="application/json",
120 | )
121 |
122 | response_body = json.loads(response.get("body").read())
123 |
124 | if DEBUG_MODE:
125 | response_metadata = response.get("ResponseMetadata")
126 | # Write response metadata to JSON file
127 | response_metadata_file_path = os.path.join(
128 | output_directory, "response_metadata.json"
129 | )
130 | with open(response_metadata_file_path, "w") as f:
131 | json.dump(response_metadata, f, indent=2)
132 |
133 | # Write response body to JSON file
134 | response_file_path = os.path.join(output_directory, "response_body.json")
135 | with open(response_file_path, "w") as f:
136 | json.dump(response_body, f, indent=2)
137 |
138 | # Log the request ID
139 | print(f"Request ID: {response['ResponseMetadata']['RequestId']}")
140 |
141 | # Check for non-exception errors
142 | if "error" in response_body:
143 | error_msg = response_body["error"]
144 | if "blocked by our content filters" in error_msg:
145 | raise ValueError(f"Content moderation blocked generation: {error_msg}")
146 | else:
147 | raise ValueError(f"API Error: {error_msg}")
148 |
149 | # Check for the "images" key in SD3/SD3.5 responses
150 | if "images" in response_body:
151 | return response_body
152 | else:
153 | raise ValueError("No images generated - empty response from API")
154 |
155 | except Exception as e:
156 | # If e has "response", write it to disk as JSON
157 | if hasattr(e, "response"):
158 | if DEBUG_MODE:
159 | error_response = e.response
160 | error_response_file_path = os.path.join(
161 | output_directory, "error_response.json"
162 | )
163 | with open(error_response_file_path, "w") as f:
164 | json.dump(error_response, f, indent=2)
165 | print(e)
166 | raise e
167 |
168 |
169 | class BedrockStabilityText2Image:
170 | """ComfyUI node for Stability AI models text-to-image"""
171 |
172 | @classmethod
173 | def INPUT_TYPES(s):
174 | return {
175 | "required": {
176 | "model": (
177 | [
178 | "SD3_Large",
179 | "SD3.5_Large",
180 | "Stable_Image_Core",
181 | "Stable_Image_Ultra",
182 | ],
183 | {"default": "Stable_Image_Core"},
184 | ),
185 | "prompt": ("STRING", {"multiline": True}),
186 | "resolution": (
187 | [
188 | "1024 x 1024",
189 | "1088 x 896",
190 | "1216 x 832",
191 | "1344 x 768",
192 | "1536 x 640",
193 | "640 x 1536",
194 | "768 x 1344",
195 | "832 x 1216",
196 | "896 x 1088",
197 | ],
198 | ),
199 | "seed": (
200 | "INT",
201 | {
202 | "default": 0,
203 | "min": 0,
204 | "max": 4294967295,
205 | "step": 1,
206 | "round": 1,
207 | "display": "number",
208 | },
209 | ),
210 | },
211 | "optional": {
212 | "negative_prompt": (
213 | "STRING",
214 | {
215 | "multiline": True,
216 | "placeholder": "Negative prompt e.g., low quality, blurry, distorted",
217 | },
218 | ),
219 | },
220 | }
221 |
222 | RETURN_TYPES = ("IMAGE",)
223 | FUNCTION = "forward"
224 | CATEGORY = "aws"
225 |
226 | def forward(
227 | self,
228 | model,
229 | prompt,
230 | resolution,
231 | seed,
232 | negative_prompt=None,
233 | ):
234 |
235 | aspect_ratio = resolution_to_aspect_ratio(resolution)
236 |
237 |         if model == "SD3_Large":
238 |             model_id_input = MODEL_ID_SD3_LARGE
239 |         elif model == "SD3.5_Large":
240 |             model_id_input = MODEL_ID_SD3_5_LARGE
241 |         elif model == "Stable_Image_Core":
242 |             model_id_input = MODEL_ID_STABLE_IMAGE_CORE
243 |         elif model == "Stable_Image_Ultra":
244 |             model_id_input = MODEL_ID_STABLE_IMAGE_ULTRA
245 |
246 | # Build parameters for Stability AI Models
247 | params = {
248 | "mode": "text-to-image",
249 | "prompt": prompt,
250 | "aspect_ratio": aspect_ratio,
251 | "output_format": "png",
252 | "seed": seed,
253 | }
254 |
255 | # Add negative prompt if provided
256 | if negative_prompt:
257 | params["negative_prompt"] = negative_prompt
258 |
259 | response_body = generate_images(
260 | inference_params=params,
261 |             model_id=model_id_input,
262 | )
263 |
264 | # Process response
265 | if "images" in response_body:
266 | base64_output_image = response_body["images"][0]
267 | image_data = base64.b64decode(base64_output_image)
268 | image = Image.open(BytesIO(image_data))
269 | else:
270 | raise ValueError("No successful images generated")
271 |
272 | # Convert to ComfyUI format
273 | result = torch.from_numpy(np.array(image).astype(np.float32) / 255.0).unsqueeze(
274 | 0
275 | )
276 |
277 | return (result,)
278 |
279 |
280 | class BedrockSD3xImage2Image:
281 | """ComfyUI node for Stability Diffusion 3 & 3.5 Large image-to-image"""
282 |
283 | @classmethod
284 | def INPUT_TYPES(s):
285 | return {
286 | "required": {
287 | "image": ("IMAGE", {"forceInput": True}),
288 | "model": (
289 | [
290 | "SD3_Large",
291 | "SD3.5_Large",
292 | ],
293 | {"default": "SD3.5_Large"},
294 | ),
295 | "prompt": ("STRING", {"multiline": True}),
296 | "seed": (
297 | "INT",
298 | {
299 | "default": 0,
300 | "min": 0,
301 | "max": 4294967295,
302 | "step": 1,
303 | "round": 1,
304 | "display": "number",
305 | },
306 | ),
307 | },
308 | "optional": {
309 | "strength": (
310 | "FLOAT",
311 | {
312 | "default": 0.6,
313 | "min": 0.0,
314 | "max": 1.0,
315 | "step": 0.01,
316 | "round": 0.01,
317 | "display": "slider",
318 | },
319 | ),
320 | "negative_prompt": (
321 | "STRING",
322 | {
323 | "multiline": True,
324 | "placeholder": "Negative prompt e.g., low quality, blurry, distorted",
325 | },
326 | ),
327 | },
328 | }
329 |
330 | RETURN_TYPES = ("IMAGE",)
331 | FUNCTION = "forward"
332 | CATEGORY = "aws"
333 |
334 | def forward(
335 | self,
336 | image,
337 | model,
338 | prompt,
339 | seed,
340 | strength,
341 | negative_prompt=None,
342 | ):
343 |
344 |         if model == "SD3_Large":
345 |             model_id_input = MODEL_ID_SD3_LARGE
346 |         elif model == "SD3.5_Large":
347 |             model_id_input = MODEL_ID_SD3_5_LARGE
348 |
349 | # Build parameters for Stability AI Models
350 | params = {
351 | "mode": "image-to-image",
352 | "prompt": prompt,
353 | "image": encode_image(image),
354 | "strength": strength,
355 | "output_format": "png",
356 | "seed": seed
357 | }
358 |
359 | # Add negative prompt if provided
360 | if negative_prompt:
361 | params["negative_prompt"] = negative_prompt
362 |
363 | response_body = generate_images(
364 | inference_params=params,
365 |             model_id=model_id_input,
366 | )
367 |
368 | # Process response
369 | if "images" in response_body:
370 | base64_output_image = response_body["images"][0]
371 | image_data = base64.b64decode(base64_output_image)
372 | image = Image.open(BytesIO(image_data))
373 | else:
374 | raise ValueError("No successful images generated")
375 |
376 | # Convert to ComfyUI format
377 | result = torch.from_numpy(np.array(image).astype(np.float32) / 255.0).unsqueeze(
378 | 0
379 | )
380 |
381 | return (result,)
382 |
383 |
384 | # Register all node classes for ComfyUI
385 | NODE_CLASS_MAPPINGS = {
386 | "Amazon Bedrock - Stability AI Models | Text to Image": BedrockStabilityText2Image,
387 | "Amazon Bedrock - SD3 & SD3.5 Large | Image to Image": BedrockSD3xImage2Image,
388 | }
389 |
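390 |
391 | # Illustrative request body (a sketch only, with hypothetical values):
392 | # generate_images() serializes the params dict built above to JSON before
393 | # calling bedrock.invoke_model, so a text-to-image call sends e.g.
394 | #
395 | # {
396 | #     "mode": "text-to-image",
397 | #     "prompt": "a watercolor fox in a snowy forest",
398 | #     "aspect_ratio": "1:1",
399 | #     "output_format": "png",
400 | #     "seed": 42,
401 | #     "negative_prompt": "low quality, blurry"
402 | # }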
--------------------------------------------------------------------------------
/nodes/file_utils.py:
--------------------------------------------------------------------------------
1 | import base64
2 | import io
3 | import os
4 | from datetime import datetime
5 |
6 | from PIL import Image
7 |
8 |
9 | def save_base64_image(base64_image, output_directory, base_name="image", suffix="_1"):
10 | """
11 | Saves a base64 encoded image to a specified output directory with a timestamp and a suffix.
12 |     Args:
13 |         base64_image (str): The base64 encoded image string.
14 |         output_directory (str): The directory where the image will be saved.
15 |         base_name (str, optional): The base filename. Defaults to "image".
16 |         suffix (str, optional): A suffix to be added to the filename. Defaults to "_1".
17 | Returns:
18 | PIL.Image.Image: The Pillow Image object representing the saved image.
19 | """
20 | image_bytes = base64.b64decode(base64_image)
21 | image = Image.open(io.BytesIO(image_bytes))
22 | save_image(image, output_directory, base_name, suffix)
23 | return image
24 |
25 |
26 | def save_image(image, output_directory, base_name="image", suffix="_1"):
27 | """
28 | Saves a Pillow Image object to a specified output directory with a timestamp and a suffix.
29 |     Args:
30 |         image (PIL.Image.Image): The Pillow Image object to be saved.
31 |         output_directory (str): The directory where the image will be saved.
32 |         base_name (str, optional): The base filename. Defaults to "image".
33 |         suffix (str, optional): A suffix to be added to the filename. Defaults to "_1".
34 | Returns:
35 | None
36 | """
37 | if not os.path.exists(output_directory):
38 | os.makedirs(output_directory)
39 |
40 | file_name = f"{base_name}{suffix}.png"
41 | file_path = os.path.join(output_directory, file_name)
42 | image.save(file_path)
43 |
44 |
45 | def save_base64_images(base64_images, output_directory, base_name="image"):
46 | """
47 | Saves a list of base64 encoded images to a specified output directory.
48 |     Args:
49 |         base64_images (list): A list of base64 encoded image strings.
50 |         output_directory (str): The directory where the images will be saved.
51 |         base_name (str, optional): The base filename. Defaults to "image".
52 | Returns:
53 |         list: The Pillow Image objects representing the saved images.
54 | """
55 | images = []
56 | for i, base64_image in enumerate(base64_images):
57 | image = save_base64_image(
58 | base64_image, output_directory, base_name=base_name, suffix=f"_{i+1}"
59 | )
60 | images.append(image)
61 |
62 | return images
63 |
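64 | # Illustrative usage sketch (hypothetical paths; `encoded` stands for the list
65 | # of base64 PNG strings an image model returns):
66 | #
67 | # images = save_base64_images(encoded, "output/run1", base_name="result")
68 | # # -> writes output/run1/result_1.png, result_2.png, ... and returns the
69 | # # decoded PIL.Image objects in order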
--------------------------------------------------------------------------------
/nodes/json.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | JSON_KEY_NUMBERS = 4
4 |
5 | class JSONTextExtraction:
6 | @classmethod
7 | def INPUT_TYPES(s):
8 | return {
9 | "required": {
10 | "json_text": ("STRING", {"multiline": False}),
11 | },
12 | "optional":{
13 | f"key{i+1}": ("STRING", {"multiline": False})
14 | for i in range(JSON_KEY_NUMBERS)
15 | }
16 | }
17 |
18 | RETURN_TYPES = ("STRING","STRING","STRING","STRING",)
19 | FUNCTION = "process"
20 | CATEGORY = "aws"
21 |
22 | def process(self, json_text, **kwargs):
23 | # Parse the JSON text
24 | data = json.loads(json_text)
25 | output = ()
26 |         # Loop over the configured number of keys (JSON_KEY_NUMBERS)
27 | for i in range(JSON_KEY_NUMBERS):
28 | key = f"key{i+1}"
29 | if key in kwargs:
30 | output += (data.get(kwargs[key], ""),)
31 | else:
32 | output += ("",)
33 |
34 | return output
35 |
36 | NODE_CLASS_MAPPINGS = {
37 | "JSON Text Extraction": JSONTextExtraction,
38 | }
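39 |
40 | # Illustrative sketch of the node's behavior (hypothetical values): outputs
41 | # follow key1..key4 in order; keys absent from the JSON, or key inputs left
42 | # unset, yield empty strings:
43 | #
44 | # JSONTextExtraction().process('{"a": 1, "b": "x"}', key1="a", key2="b")
45 | # # -> (1, "x", "", "")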
--------------------------------------------------------------------------------
/nodes/prompts.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 |
4 | class PromptTemplate:
5 | @classmethod
6 | def INPUT_TYPES(s):
7 | return {
8 | "required": {
9 | "prompt": ("STRING", {"multiline": True}),
10 | "prompt_template": ("STRING", {"multiline": True}),
11 | }
12 | }
13 |
14 | RETURN_TYPES = ("STRING",)
15 | FUNCTION = "forward"
16 |
17 | CATEGORY = "utils"
18 |
19 | def forward(self, prompt, prompt_template):
20 | output = prompt_template.replace("{prompt}", prompt).replace("[prompt]", prompt)
21 | #print("output prompt:",output)
22 | return (output,)
23 |
24 | class PromptTemplateTwoInput:
25 | @classmethod
26 | def INPUT_TYPES(s):
27 | return {
28 | "required": {
29 | "prompt": ("STRING", {"multiline": True}),
30 | "prompt2": ("STRING", {"multiline": True}),
31 | "prompt_template": ("STRING", {"multiline": True}),
32 | }
33 | }
34 |
35 | RETURN_TYPES = ("STRING",)
36 | FUNCTION = "forward"
37 |
38 | CATEGORY = "utils"
39 |
40 | def forward(self, prompt, prompt2, prompt_template):
41 | output = prompt_template.replace("{prompt}", prompt).replace("[prompt]", prompt)
42 | output = output.replace("{prompt2}", prompt2).replace("[prompt2]", prompt2)
43 | return (output,)
44 |
45 | class PromptRegexRemove:
46 | @classmethod
47 | def INPUT_TYPES(s):
48 | return {
49 | "required": {
50 | "prompt": ("STRING", {"multiline": True}),
51 | "regex_string": ("STRING", {"multiline": True}),
52 | }
53 | }
54 |
55 | RETURN_TYPES = ("STRING",)
56 | FUNCTION = "replace"
57 |
58 | CATEGORY = "utils"
59 |
60 | def replace(self, prompt, regex_string):
61 | output = re.sub(regex_string, "", prompt)
62 | return (output,)
63 |
64 |
65 | NODE_CLASS_MAPPINGS = {
66 | "Prompt Template": PromptTemplate,
67 | "Prompt Template with Two Inputs": PromptTemplateTwoInput,
68 | "Prompt Regex Remove": PromptRegexRemove,
69 | }
70 |
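71 | # Illustrative sketch (hypothetical strings): both the {prompt} and [prompt]
72 | # spellings are substituted, so either placeholder style works in a template:
73 | #
74 | # PromptTemplate().forward("a red fox", "High quality photo of {prompt}")
75 | # # -> ("High quality photo of a red fox",)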
--------------------------------------------------------------------------------
/nodes/s3.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import re
4 | import base64
5 | from io import BytesIO
6 |
7 | import requests
8 | from retry import retry
9 | import boto3
10 |
11 |
12 | from PIL import Image
13 | import numpy as np
14 | import torch
15 |
16 |
17 | from .session import get_client
18 |
19 |
20 | s3_client = get_client(service_name="s3")
21 |
22 | MAX_RETRY = 3
23 |
24 |
25 | class ImageFromURL:
26 | @classmethod
27 | def INPUT_TYPES(s):
28 | return {
29 | "required": {
30 | "url": ("STRING", {"multiline": False}),
31 | }
32 | }
33 |
34 | RETURN_TYPES = ("IMAGE",)
35 | FUNCTION = "forward"
36 | CATEGORY = "aws"
37 |
38 | def download_s3(self, bucket, key):
39 | response = s3_client.get_object(Bucket=bucket, Key=key)
40 | image = Image.open(response["Body"])
41 | image = torch.from_numpy(np.array(image).astype(np.float32) / 255.0).unsqueeze(
42 | 0
43 | )
44 | return image
45 |
46 | def download_http(self, url):
47 | headers = {
48 | "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36",
49 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
50 | "Accept-Language": "en-US,en;q=0.5",
51 | "Accept-Encoding": "gzip, deflate, br",
52 | "Connection": "keep-alive",
53 | "Upgrade-Insecure-Requests": "1",
54 | "Cache-Control": "max-age=0",
55 | }
56 |         response = requests.get(url, headers=headers)
57 |         image = Image.open(BytesIO(response.content))
58 | image = torch.from_numpy(np.array(image).astype(np.float32) / 255.0).unsqueeze(
59 | 0
60 | )
61 | return image
62 |
63 | @retry(tries=MAX_RETRY)
64 | def forward(self, url):
65 | if url.startswith("s3://"):
66 | bucket, key = url.split("s3://")[1].split("/", 1)
67 | image = self.download_s3(bucket, key)
68 | elif re.match(r"^https://.*\.s3\..*\.amazonaws\.com/.*", url):
69 | _, _, bucket, key = url.split("/", 3)
70 | bucket = bucket.split(".")[0]
71 | image = self.download_s3(bucket, key)
72 | elif url.startswith("http"):
73 | image = self.download_http(url)
74 | else:
75 | raise ValueError("Invalid URL")
76 | return (image,)
77 |
78 |
79 | class ImageFromS3:
80 | @classmethod
81 | def INPUT_TYPES(s):
82 | return {
83 | "required": {
84 | "bucket": ([b["Name"] for b in s3_client.list_buckets()["Buckets"]],),
85 | "key": ("STRING", {"multiline": False}),
86 | }
87 | }
88 |
89 | RETURN_TYPES = ("IMAGE",)
90 | FUNCTION = "forward"
91 | CATEGORY = "aws"
92 |
93 | def download_s3(self, bucket, key):
94 | response = s3_client.get_object(Bucket=bucket, Key=key)
95 | image = Image.open(response["Body"])
96 | image = torch.from_numpy(np.array(image).astype(np.float32) / 255.0).unsqueeze(
97 | 0
98 | )
99 | return image
100 |
101 | @retry(tries=MAX_RETRY)
102 | def forward(self, bucket, key):
103 | image = self.download_s3(bucket, key)
104 | return (image,)
105 |
106 |
107 | class ImageToS3:
108 | @classmethod
109 | def INPUT_TYPES(s):
110 | return {
111 | "required": {
112 | "image": ("IMAGE",),
113 | "bucket": ([b["Name"] for b in s3_client.list_buckets()["Buckets"]],),
114 | "key": ("STRING", {"multiline": False}),
115 | }
116 | }
117 |
118 | RETURN_TYPES = ("STRING",)
119 | FUNCTION = "forward"
120 | CATEGORY = "aws"
121 | OUTPUT_NODE = True
122 |
123 | def upload_s3(self, image, bucket, key):
124 | image = image[0] * 255.0
125 | image = Image.fromarray(image.clamp(0, 255).numpy().round().astype(np.uint8))
126 | buffer = BytesIO()
127 |         image.save(buffer, format="JPEG" if key.split(".")[-1].lower() in ("jpg", "jpeg") else key.split(".")[-1])
128 | buffer.seek(0)
129 | s3_client.put_object(
130 | Bucket=bucket,
131 | Key=key,
132 | Body=buffer,
133 |             ContentType=f"image/{'jpeg' if key.split('.')[-1].lower() == 'jpg' else key.split('.')[-1].lower()}",
134 | )
135 |
136 | @retry(tries=MAX_RETRY)
137 | def forward(self, image, bucket, key):
138 | self.upload_s3(image, bucket, key)
139 | return (f"s3://{bucket}/{key}",)
140 |
141 |
142 | NODE_CLASS_MAPPINGS = {
143 | "Image From URL": ImageFromURL,
144 | "Image From S3": ImageFromS3,
145 | "Image To S3": ImageToS3,
146 | }
147 |
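148 | # Illustrative usage sketch (hypothetical bucket and keys): ImageFromURL
149 | # accepts s3:// URIs, virtual-hosted S3 HTTPS URLs, and plain HTTP(S) URLs:
150 | #
151 | # ImageFromURL().forward("s3://my-bucket/inputs/cat.png")
152 | # ImageFromURL().forward("https://my-bucket.s3.us-east-1.amazonaws.com/cat.png")
153 | # ImageToS3().forward(image_tensor, "my-bucket", "outputs/cat.png")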
--------------------------------------------------------------------------------
/nodes/session.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from retry import retry
3 | import boto3
4 |
5 |
6 | MAX_RETRY = 3
7 |
8 |
9 | @retry(tries=MAX_RETRY)
10 | def get_client(service_name, clients={}):  # mutable default intentionally caches clients per service
11 | if service_name in clients:
12 | return clients[service_name]
13 |
14 | try:
15 | clients[service_name] = boto3.client(service_name=service_name)
16 | except Exception as e:
17 |         # Fall back to the EC2 instance metadata service (IMDSv2) to resolve the region
18 | response = requests.put(
19 | "http://169.254.169.254/latest/api/token",
20 | headers={
21 | "X-aws-ec2-metadata-token-ttl-seconds": "21600",
22 | },
23 | )
24 | token = response.text
25 | response = requests.get(
26 | "http://169.254.169.254/latest/meta-data/placement/region",
27 | headers={
28 | "X-aws-ec2-metadata-token": token,
29 | },
30 | )
31 | boto3.setup_default_session(region_name=response.text)
32 | print("Automatically set region to", response.text)
33 | clients[service_name] = boto3.client(service_name=service_name)
34 | return clients[service_name]
35 |
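36 | # Illustrative usage sketch: repeated calls for the same service name return
37 | # the cached boto3 client, e.g.
38 | #
39 | # bedrock = get_client(service_name="bedrock-runtime")
40 | # assert get_client(service_name="bedrock-runtime") is bedrock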
--------------------------------------------------------------------------------
/nodes/textract.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import re
4 | import base64
5 | from io import BytesIO
6 | import io
7 | import requests
8 | from retry import retry
9 | import boto3
10 | from PIL import Image
11 | import numpy as np
12 | import torch
13 | import cv2
14 | import folder_paths
15 | from .session import get_client
16 |
17 | from torchvision import transforms
18 | from paddleocr import PaddleOCR, draw_ocr
19 | from paddleocr import PPStructure,draw_structure_result
20 | import tempfile
21 |
22 |
23 | current_directory = os.path.dirname(os.path.abspath(__file__))
24 | temp_img_path = os.path.join(current_directory, "temp_dir", "AnyText_manual_mask_pos_img.png")
25 | textract = get_client(service_name="textract")
26 | MAX_RETRY = 3
27 |
28 |
29 | class ImageOCRByTextract:
30 | @classmethod
31 | def INPUT_TYPES(s):
32 | return {
33 | "required": {
34 | "image": ("IMAGE",)
35 | }
36 | }
37 |
38 | RETURN_TYPES = ("STRING","INT","INT","INT","INT","IMAGE")
39 | RETURN_NAMES = ("Text","Left","Top","Width","Height","Mask Image")
40 | FUNCTION = "forward"
41 | CATEGORY = "aws"
42 | OUTPUT_NODE = True
43 |
44 | def ocr_by_textract(self,image_input):
45 | numpy_image = (image_input[0] * 255.0).clamp(0, 255).numpy()
46 | image = image_input[0] * 255.0
47 | image = Image.fromarray(image.clamp(0, 255).numpy().round().astype(np.uint8))
48 | # Get the original dimensions of the image
49 | img_width, img_height = image.size
50 |
51 | ## Call Textract DetectDocumentText function
52 | byte_stream = io.BytesIO()
53 | image.save(byte_stream, format='PNG')
54 | byte_image = byte_stream.getvalue()
55 | response = textract.detect_document_text(Document={'Bytes': byte_image})
56 |
57 |         # Initialize the scan result
58 | result = []
59 | # Create a black mask image of the same size as the original image
60 | mask = np.zeros_like(numpy_image)
61 | all_text=""
62 |
63 |
64 | # Extract text and bounding box information
65 | for item in response['Blocks']:
66 | if item['BlockType'] == 'LINE':
67 | text = item['Text']
68 | box = item['Geometry']['BoundingBox']
69 | left = int(box['Left'] * img_width)
70 | top = int(box['Top'] * img_height)
71 | width = int(box['Width'] * img_width)
72 | height = int(box['Height']* img_height)
73 |
74 | ##Add information to the result list
75 | result.append({
76 | 'Text': text,
77 | 'Left': left,
78 | 'Top': top,
79 | 'Width': width,
80 | 'Height': height
81 |
82 | })
83 |                 all_text = all_text + text
84 | #Draw mask for each text information box
85 | #Specify the coordinates of the top-left and bottom-right corners of the rectangle
86 | temp_mask = np.zeros_like(numpy_image)
87 | x1, y1 = int(left), int(top)
88 | x2, y2 = int(left + width), int(top + height)
89 | # Draw white rectangular boxes on the mask image
90 | cv2.rectangle(temp_mask, (x1, y1), (x2, y2), (255, 255, 255), -1)
91 | mask = cv2.bitwise_or(mask, temp_mask)
92 |
93 | # Perform bitwise operation between the mask image and the original image to generate the masked image
94 | masked_img = cv2.bitwise_and(numpy_image, mask)
95 | masked_img = torch.from_numpy(np.array(masked_img).astype(np.float32) / 255.0).unsqueeze(0)
96 |
97 | print("result",result)
98 |
99 | return all_text,result[0]['Left'],result[0]['Top'],result[0]['Width'],result[0]['Height'],masked_img
100 |
101 |
102 | @retry(tries=MAX_RETRY)
103 | def forward(self, image):
104 | return self.ocr_by_textract(image)
105 |
106 |
107 | ### for AnyText nodes
108 | class ImageOCRByTextractV2:
109 |
110 | @classmethod
111 | def INPUT_TYPES(s):
112 | input_dir = folder_paths.get_input_directory()
113 | files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))]
114 | return {"required":
115 | {
116 | "image": (sorted(files), {"image_upload": True}),
117 | },
118 | }
119 |
120 | RETURN_TYPES = ("STRING","STRING","STRING","IMAGE")
121 | RETURN_NAMES = ("Texts","Original Image Path","Mask Image Path","AnyText Mask Image")
122 | FUNCTION = "forward"
123 | CATEGORY = "aws"
124 | OUTPUT_NODE = True
125 |
126 | def ocr_by_textract(self,image_input):
127 | ori_image_path = folder_paths.get_annotated_filepath(image_input)
128 | pos_img_path = os.path.join(temp_img_path)
129 |
130 |         ## Convert the original image to a tensor
131 | pil_image = Image.open(ori_image_path)
132 | transform = transforms.Compose([
133 | transforms.ToTensor(), # Convert PIL Image to tensor and normalize pixel values to [0, 1]
134 | ])
135 | image_input = transform(pil_image)
136 |
137 | ## The image input is already in the standard ComfyUI image tensor format
138 | numpy_image = (image_input[0] * 255.0).clamp(0, 255).numpy()
139 | image = image_input[0] * 255.0
140 | image = Image.fromarray(image.clamp(0, 255).numpy().round().astype(np.uint8))
141 | # Get the original dimensions of the image
142 | img_width, img_height = image.size
143 |
144 | # Call the Textract DetectDocumentText function
145 | byte_stream = io.BytesIO()
146 | image.save(byte_stream, format='PNG')
147 | byte_image = byte_stream.getvalue()
148 | response = textract.detect_document_text(Document={'Bytes': byte_image})
149 |
150 | # Initialize the results list
151 | result = []
152 | # Create a black mask image with the same size as the original image
153 | #mask = np.zeros_like(numpy_image)
154 | mask = np.ones_like(numpy_image) * 255
155 | all_text=""
156 |
157 |
158 | # Extract text and bounding box information
159 | for item in response['Blocks']:
160 | if item['BlockType'] == 'LINE':
161 | text = item['Text']
162 | box = item['Geometry']['BoundingBox']
163 | left = int(box['Left'] * img_width)
164 | top = int(box['Top'] * img_height)
165 | width = int(box['Width'] * img_width)
166 | height = int(box['Height']* img_height)
167 |
168 | # Add the information to the result list
169 | result.append(text)
170 | # Draw mask for each text information box
171 | # Specify the coordinates of the top-left and bottom-right corners of the rectangle
172 | temp_mask = np.zeros_like(numpy_image)
173 | x1, y1 = int(left), int(top)
174 | x2, y2 = int(left + width), int(top + height)
175 | # Draw black rectangular boxes on the mask image
176 | #cv2.rectangle(temp_mask, (x1, y1), (x2, y2), (0, 0, 0), -1)
177 | #mask = cv2.bitwise_or(mask, temp_mask)
178 |
179 | cv2.rectangle(mask, (left, top), (left + width, top + height), (0, 0, 0), -1)
180 |
181 | # Perform bitwise operation between the mask image and the original image to generate the masked image
182 | masked_img = np.where(mask == 0, 0, numpy_image)
183 | #masked_img = cv2.bitwise_and(numpy_image, mask)
184 | masked_img = torch.from_numpy(np.array(masked_img).astype(np.float32) / 255.0).unsqueeze(0)
185 | #masked_img.save(temp_img_path)
186 | # Convert PyTorch tensor to PIL image
187 | to_pil = transforms.ToPILImage()
188 | pil_image = to_pil(masked_img.squeeze(0))
189 | # save PIL image
190 | pil_image.save(temp_img_path)
191 |
192 | all_text="|".join(result)
193 | print("result",result)
194 |
195 | return all_text ,ori_image_path,temp_img_path, masked_img
196 |
197 |
198 |
199 | @retry(tries=MAX_RETRY)
200 | def forward(self, image):
201 | return self.ocr_by_textract(image)
202 |
203 |
204 | ## for layer style nodes (PaddleOCR variant, registered as "Image OCR by PaddleOCR")
205 | class ImageOCRByTextractV4:
206 |
207 | @classmethod
208 | def INPUT_TYPES(s):
209 | input_dir = folder_paths.get_input_directory()
210 | files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))]
211 | return {"required":{
212 | "image": ("IMAGE",),
213 | }
214 | }
215 |
216 | RETURN_TYPES = ("STRING","STRING","STRING","STRING","STRING","STRING","STRING","IMAGE","IMAGE")
217 | RETURN_NAMES = ("Texts","x_offsets","y_offsets","widths","heights","img_width","img_height","Mask Image","Original Image")
218 | FUNCTION = "forward"
219 | CATEGORY = "aws"
220 | OUTPUT_NODE = True
221 |
222 | def convert_to_xywh(self,coordinates):
223 | # Extract all x and y coordinates
224 | x_coords = [coord[0] for coord in coordinates]
225 | y_coords = [coord[1] for coord in coordinates]
226 |
227 | # Calculate x offset and y offset
228 | x_offset = min(x_coords)
229 | y_offset = min(y_coords)
230 |
231 | # Calculate width and height
232 | width = max(x_coords) - x_offset
233 | height = max(y_coords) - y_offset
234 |
235 | print(x_offset,y_offset,width,height)
236 | return int(x_offset), int(y_offset), int(width), int(height)
237 |
238 | def ocr_by_paddleocr(self,image_input):
239 |
240 | image = image_input[0] * 255.0
241 | image = Image.fromarray(image.clamp(0, 255).numpy().round().astype(np.uint8))
242 | numpy_image = (image_input[0] * 255.0).clamp(0, 255).numpy()
243 |
244 | img_width, img_height = image.size
245 |
246 |
247 | # Create temporary file
248 | with tempfile.NamedTemporaryFile(delete=False, suffix='.png', dir='/tmp/') as temp_file:
249 | temp_filename = temp_file.name
250 |
251 | # Save numpy_image as a temporary file
252 | cv2.imwrite(temp_filename, cv2.cvtColor(numpy_image, cv2.COLOR_RGB2BGR))
253 |
254 | ocr = PaddleOCR(
255 | det_model_dir='/home/ubuntu/ComfyUI/models/checkpoints/PaddleOCR/det',
256 | rec_model_dir='/home/ubuntu/ComfyUI/models/checkpoints/PaddleOCR/rec',
257 | det_limit_side_len=2048,
258 | use_angle_cls=True,
259 | #use_gpu=False,
260 | )
261 |
262 | ocr_results = ocr.ocr(temp_filename, cls=True)[0]
263 |
264 | # Create a mask image with the same size as the original image
265 | result = []
266 | masked_img = numpy_image.copy()
267 | all_text=""
268 | x_offsets=[]
269 | y_offsets=[]
270 | widths=[]
271 | heights=[]
272 |
273 |
274 | # Extract text and bounding box information
275 | for line in ocr_results:
276 | if not isinstance(line, list):
277 | continue
278 | boxes = line[0]
279 | x_offset,y_offset,width,height = self.convert_to_xywh(boxes)
280 | x_offsets.append(str(x_offset))
281 | y_offsets.append(str(y_offset))
282 | widths.append(str(width))
283 | heights.append(str(height))
284 |
285 | text = line[1][0]
286 |             # Log the recognized text line for debugging
287 |             print("text:", text)
288 | result.append(text)
289 |
290 | # Draw mask for each text information box
291 | # Specify the coordinates of the top-left and bottom-right corners of the rectangle
292 | x1, y1 = int(x_offset), int(y_offset)
293 | x2, y2 = int(x_offset + width), int(y_offset + height)
294 | # Draw a black rectangle on the mask image
295 | cv2.rectangle(masked_img, (x1, y1), (x2, y2), (0, 0, 0), -1)
296 |
297 | masked_img = torch.from_numpy(np.array(masked_img).astype(np.float32) / 255.0).unsqueeze(0)
298 |
299 | all_text="|".join(result)
300 | x_offsets="|".join(x_offsets)
301 | y_offsets="|".join(y_offsets)
302 | widths="|".join(widths)
303 | heights="|".join(heights)
304 |
305 | print("result",result)
306 |
307 | # Add original image output
308 | original_img = image_input
309 | # delete temp files
310 | os.unlink(temp_filename)
311 |
312 | return all_text ,x_offsets,y_offsets,widths,heights,img_width,img_height, masked_img,original_img
313 |
314 |
315 |
316 | @retry(tries=MAX_RETRY)
317 | def forward(self, image):
318 | return self.ocr_by_paddleocr(image)
319 |
320 |
321 | ## for layer style nodes
322 | class ImageOCRByTextractV3:
323 |
324 | @classmethod
325 | def INPUT_TYPES(s):
326 | input_dir = folder_paths.get_input_directory()
327 | files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))]
328 | return {"required":{
329 | "image": ("IMAGE",),
330 | }
331 | }
332 |
333 | RETURN_TYPES = ("STRING","STRING","STRING","STRING","STRING","STRING","STRING","IMAGE","IMAGE")
334 | RETURN_NAMES = ("Texts","x_offsets","y_offsets","widths","heights","img_width","img_height","Mask Image","Original Image")
335 | FUNCTION = "forward"
336 | CATEGORY = "aws"
337 | OUTPUT_NODE = True
338 |
339 | def ocr_by_textract(self,image_input):
340 |
341 |
342 | image = image_input[0] * 255.0
343 | image = Image.fromarray(image.clamp(0, 255).numpy().round().astype(np.uint8))
344 | numpy_image = (image_input[0] * 255.0).clamp(0, 255).numpy()
345 |
346 |
347 | img_width, img_height = image.size
348 |
349 |
350 | byte_stream = io.BytesIO()
351 | image.save(byte_stream, format='PNG')
352 | byte_image = byte_stream.getvalue()
353 | response = textract.detect_document_text(Document={'Bytes': byte_image})
354 |
355 |
356 | result = []
357 |
358 | masked_img = numpy_image.copy()
359 | all_text=""
360 | x_offsets=[]
361 | y_offsets=[]
362 | widths=[]
363 | heights=[]
364 |
365 |
366 |
367 | for item in response['Blocks']:
368 | if item['BlockType'] == 'LINE':
369 | text = item['Text']
370 | box = item['Geometry']['BoundingBox']
371 |
372 | left = int(box['Left'] * img_width)
373 | x_offsets.append(str(left))
374 |
375 | top = int(box['Top'] * img_height)
376 | y_offsets.append(str(top))
377 |
378 | width = int(box['Width'] * img_width)
379 | widths.append(str(width))
380 |
381 | height = int(box['Height']* img_height)
382 | heights.append(str(height))
383 |
384 |
385 | result.append(text)
386 | # Draw mask for each text information box
387 | # Specify the coordinates of the top-left and bottom-right corners of the rectangle
388 | x1, y1 = int(left), int(top)
389 | x2, y2 = int(left + width), int(top + height)
390 | # Draw black rectangular boxes on the mask image
391 | cv2.rectangle(masked_img, (x1, y1), (x2, y2), (0, 0, 0), -1)
392 |
393 |
394 |
395 | masked_img = torch.from_numpy(np.array(masked_img).astype(np.float32) / 255.0).unsqueeze(0)
396 |         ### Summarize the outputs
397 | all_text="|".join(result)
398 | x_offsets="|".join(x_offsets)
399 | y_offsets="|".join(y_offsets)
400 | widths="|".join(widths)
401 | heights="|".join(heights)
402 |
403 | print("result",result)
404 |
405 | # Add original image output
406 | original_img = image_input
407 |
408 | return all_text ,x_offsets,y_offsets,widths,heights,img_width,img_height, masked_img,original_img
409 |
410 |
411 |
412 | @retry(tries=MAX_RETRY)
413 | def forward(self, image):
414 | return self.ocr_by_textract(image)
415 |
416 |
417 | NODE_CLASS_MAPPINGS = {
418 | "Image OCR By Textract": ImageOCRByTextract,
419 | "Image OCR By Textract V2":ImageOCRByTextractV2,
420 | "Image OCR By Textract V3":ImageOCRByTextractV3,
421 | "Image OCR by PaddleOCR": ImageOCRByTextractV4
422 | }
423 |
424 |
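425 | # Illustrative sketch (hypothetical tensor): ImageOCRByTextract, V3, and V4
426 | # take the standard ComfyUI image tensor of shape [batch, height, width,
427 | # channels] with values in [0, 1]; V2 takes an uploaded image filename instead:
428 | #
429 | # text, left, top, width, height, mask = ImageOCRByTextract().forward(image)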
--------------------------------------------------------------------------------
/nodes/utils.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import re
4 | import base64
5 | from io import BytesIO
6 |
7 | from PIL import Image
8 | import numpy as np
9 | import torch
10 |
11 |
12 | def image_to_base64(image):
13 | image = image[0] * 255.0
14 | image = Image.fromarray(image.clamp(0, 255).numpy().round().astype(np.uint8))
15 |
16 | buffer = BytesIO()
17 | image.save(buffer, format="PNG")
18 |
19 | image_data = buffer.getvalue()
20 |
21 | image_base64 = base64.b64encode(image_data).decode("utf-8")
22 |
23 | return image_base64
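24 |
25 | # Illustrative sketch (hypothetical tensor): converts the first image of a
26 | # ComfyUI batch tensor ([B, H, W, C] floats in [0, 1]) into a base64 PNG string
27 | # suitable for embedding in a Bedrock request body:
28 | #
29 | # payload = {"image": image_to_base64(image_tensor)}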
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3
2 | requests
3 | retry
4 |
--------------------------------------------------------------------------------
/workflows/amazon_nova_canvas_nodes.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 17,
3 | "last_link_id": 23,
4 | "nodes": [
5 | {
6 | "id": 8,
7 | "type": "LoadImage",
8 | "pos": [
9 | -300,
10 | 0
11 | ],
12 | "size": [
13 | 340,
14 | 340
15 | ],
16 | "flags": {},
17 | "order": 0,
18 | "mode": 0,
19 | "inputs": [],
20 | "outputs": [
21 | {
22 | "name": "IMAGE",
23 | "type": "IMAGE",
24 | "links": [
25 | 18
26 | ],
27 | "slot_index": 0
28 | },
29 | {
30 | "name": "MASK",
31 | "type": "MASK",
32 | "links": null,
33 | "slot_index": 1
34 | }
35 | ],
36 | "properties": {
37 | "Node name for S&R": "LoadImage"
38 | },
39 | "widgets_values": [
40 | "aws_logo_portrait_2.png",
41 | "image"
42 | ]
43 | },
44 | {
45 | "id": 9,
46 | "type": "Amazon Bedrock - Nova Canvas Generate Image",
47 | "pos": [
48 | 100,
49 | 0
50 | ],
51 | "size": [
52 | 453.5999755859375,
53 | 344
54 | ],
55 | "flags": {},
56 | "order": 2,
57 | "mode": 0,
58 | "inputs": [
59 | {
60 | "name": "image",
61 | "type": "IMAGE",
62 | "link": 18,
63 | "shape": 7
64 | }
65 | ],
66 | "outputs": [
67 | {
68 | "name": "IMAGE",
69 | "type": "IMAGE",
70 | "links": [
71 | 19
72 | ],
73 | "slot_index": 0
74 | }
75 | ],
76 | "properties": {
77 | "Node name for S&R": "Amazon Bedrock - Nova Canvas Generate Image"
78 | },
79 | "widgets_values": [
80 | "retro anime vibes, akira",
81 | 2,
82 | "1024 x 1024",
83 | 7,
84 | 52793584,
85 | "fixed",
86 | "",
87 | "",
88 | "SEGMENTATION",
89 | 0.7
90 | ]
91 | },
92 | {
93 | "id": 17,
94 | "type": "LoadImage",
95 | "pos": [
96 | -300,
97 | 400
98 | ],
99 | "size": [
100 | 340,
101 | 320
102 | ],
103 | "flags": {},
104 | "order": 1,
105 | "mode": 0,
106 | "inputs": [],
107 | "outputs": [
108 | {
109 | "name": "IMAGE",
110 | "type": "IMAGE",
111 | "links": [
112 | 22,
113 | 23
114 | ],
115 | "slot_index": 0
116 | },
117 | {
118 | "name": "MASK",
119 | "type": "MASK",
120 | "links": null
121 | }
122 | ],
123 | "properties": {
124 | "Node name for S&R": "LoadImage"
125 | },
126 | "widgets_values": [
127 | "bird.jpg",
128 | "image"
129 | ]
130 | },
131 | {
132 | "id": 15,
133 | "type": "PreviewImage",
134 | "pos": [
135 | 600,
136 | 400
137 | ],
138 | "size": [
139 | 580,
140 | 320
141 | ],
142 | "flags": {},
143 | "order": 7,
144 | "mode": 0,
145 | "inputs": [
146 | {
147 | "name": "images",
148 | "type": "IMAGE",
149 | "link": 20
150 | }
151 | ],
152 | "outputs": [],
153 | "properties": {
154 | "Node name for S&R": "PreviewImage"
155 | }
156 | },
157 | {
158 | "id": 11,
159 | "type": "Amazon Bedrock - Nova Canvas Background Prompt Replace",
160 | "pos": [
161 | 100,
162 | 780
163 | ],
164 | "size": [
165 | 460,
166 | 222
167 | ],
168 | "flags": {},
169 | "order": 3,
170 | "mode": 0,
171 | "inputs": [
172 | {
173 | "name": "image",
174 | "type": "IMAGE",
175 | "link": 22
176 | }
177 | ],
178 | "outputs": [
179 | {
180 | "name": "IMAGE",
181 | "type": "IMAGE",
182 | "links": [
183 | 21
184 | ],
185 | "slot_index": 0
186 | }
187 | ],
188 | "properties": {
189 | "Node name for S&R": "Amazon Bedrock - Nova Canvas Background Prompt Replace"
190 | },
191 | "widgets_values": [
192 | "dystopian landscape",
193 | "bird",
194 | 2,
195 | 8.837,
196 | 28,
197 | "fixed"
198 | ]
199 | },
200 | {
201 | "id": 16,
202 | "type": "PreviewImage",
203 | "pos": [
204 | 600,
205 | 780
206 | ],
207 | "size": [
208 | 580,
209 | 300
210 | ],
211 | "flags": {},
212 | "order": 6,
213 | "mode": 0,
214 | "inputs": [
215 | {
216 | "name": "images",
217 | "type": "IMAGE",
218 | "link": 21
219 | }
220 | ],
221 | "outputs": [],
222 | "properties": {
223 | "Node name for S&R": "PreviewImage"
224 | }
225 | },
226 | {
227 | "id": 2,
228 | "type": "PreviewImage",
229 | "pos": [
230 | 600,
231 | 0
232 | ],
233 | "size": [
234 | 580,
235 | 340
236 | ],
237 | "flags": {},
238 | "order": 5,
239 | "mode": 0,
240 | "inputs": [
241 | {
242 | "name": "images",
243 | "type": "IMAGE",
244 | "link": 19
245 | }
246 | ],
247 | "outputs": [],
248 | "properties": {
249 | "Node name for S&R": "PreviewImage"
250 | },
251 | "widgets_values": []
252 | },
253 | {
254 | "id": 10,
255 | "type": "Amazon Bedrock - Nova Canvas Generate Variations",
256 | "pos": [
257 | 100,
258 | 400
259 | ],
260 | "size": [
261 | 460,
262 | 320
263 | ],
264 | "flags": {},
265 | "order": 4,
266 | "mode": 0,
267 | "inputs": [
268 | {
269 | "name": "image",
270 | "type": "IMAGE",
271 | "link": 23
272 | }
273 | ],
274 | "outputs": [
275 | {
276 | "name": "IMAGE",
277 | "type": "IMAGE",
278 | "links": [
279 | 20
280 | ],
281 | "slot_index": 0
282 | }
283 | ],
284 | "properties": {
285 | "Node name for S&R": "Amazon Bedrock - Nova Canvas Generate Variations"
286 | },
287 | "widgets_values": [
288 | "retro anime vibes, akira",
289 | "blurry, bad quality",
290 | 0.9400000000000001,
291 | 2,
292 | 4.988,
293 | "1024 x 1024",
294 | 489754107,
295 | "fixed"
296 | ]
297 | }
298 | ],
299 | "links": [
300 | [
301 | 18,
302 | 8,
303 | 0,
304 | 9,
305 | 0,
306 | "IMAGE"
307 | ],
308 | [
309 | 19,
310 | 9,
311 | 0,
312 | 2,
313 | 0,
314 | "IMAGE"
315 | ],
316 | [
317 | 20,
318 | 10,
319 | 0,
320 | 15,
321 | 0,
322 | "IMAGE"
323 | ],
324 | [
325 | 21,
326 | 11,
327 | 0,
328 | 16,
329 | 0,
330 | "IMAGE"
331 | ],
332 | [
333 | 22,
334 | 17,
335 | 0,
336 | 11,
337 | 0,
338 | "IMAGE"
339 | ],
340 | [
341 | 23,
342 | 17,
343 | 0,
344 | 10,
345 | 0,
346 | "IMAGE"
347 | ]
348 | ],
349 | "groups": [],
350 | "config": {},
351 | "extra": {
352 | "ds": {
353 | "scale": 0.8264462809917354,
354 | "offset": {
355 | "0": 446.286865234375,
356 | "1": 53.397308349609375
357 | }
358 | },
359 | "ue_links": []
360 | },
361 | "version": 0.4
362 | }
--------------------------------------------------------------------------------
/workflows/amazon_stablity_ai.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 20,
3 | "last_link_id": 29,
4 | "nodes": [
5 | {
6 | "id": 2,
7 | "type": "PreviewImage",
8 | "pos": [
9 | 600,
10 | 0
11 | ],
12 | "size": [
13 | 580,
14 | 340
15 | ],
16 | "flags": {},
17 | "order": 3,
18 | "mode": 0,
19 | "inputs": [
20 | {
21 | "name": "images",
22 | "type": "IMAGE",
23 | "link": 25
24 | }
25 | ],
26 | "outputs": [],
27 | "properties": {
28 | "Node name for S&R": "PreviewImage"
29 | },
30 | "widgets_values": []
31 | },
32 | {
33 | "id": 17,
34 | "type": "LoadImage",
35 | "pos": [
36 | -300,
37 | 400
38 | ],
39 | "size": [
40 | 340,
41 | 320
42 | ],
43 | "flags": {},
44 | "order": 0,
45 | "mode": 0,
46 | "inputs": [],
47 | "outputs": [
48 | {
49 | "name": "IMAGE",
50 | "type": "IMAGE",
51 | "links": [
52 | 27
53 | ],
54 | "slot_index": 0
55 | },
56 | {
57 | "name": "MASK",
58 | "type": "MASK",
59 | "links": null
60 | }
61 | ],
62 | "properties": {
63 | "Node name for S&R": "LoadImage"
64 | },
65 | "widgets_values": [
66 | "bird.jpg",
67 | "image"
68 | ]
69 | },
70 | {
71 | "id": 20,
72 | "type": "PreviewImage",
73 | "pos": [
74 | 600,
75 | 400
76 | ],
77 | "size": [
78 | 580,
79 | 380
80 | ],
81 | "flags": {},
82 | "order": 4,
83 | "mode": 0,
84 | "inputs": [
85 | {
86 | "name": "images",
87 | "type": "IMAGE",
88 | "link": 29
89 | }
90 | ],
91 | "outputs": [],
92 | "properties": {
93 | "Node name for S&R": "PreviewImage"
94 | },
95 | "widgets_values": []
96 | },
97 | {
98 | "id": 19,
99 | "type": "Amazon Bedrock - SD3 & SD3.5 Large | Image to Image",
100 | "pos": [
101 | 100,
102 | 400
103 | ],
104 | "size": [
105 | 460,
106 | 222
107 | ],
108 | "flags": {},
109 | "order": 2,
110 | "mode": 0,
111 | "inputs": [
112 | {
113 | "name": "image",
114 | "type": "IMAGE",
115 | "link": 27
116 | }
117 | ],
118 | "outputs": [
119 | {
120 | "name": "IMAGE",
121 | "type": "IMAGE",
122 | "links": [
123 | 29
124 | ],
125 | "slot_index": 0
126 | }
127 | ],
128 | "properties": {
129 | "Node name for S&R": "Amazon Bedrock - SD3 & SD3.5 Large | Image to Image"
130 | },
131 | "widgets_values": [
132 | "SD3.5_Large",
133 | "retro anime, akira bird",
134 | 3630365967,
135 | "fixed",
136 | 0.85,
137 | "bad quality, blurry"
138 | ]
139 | },
140 | {
141 | "id": 18,
142 | "type": "Amazon Bedrock - Stability AI Models | Text to Image",
143 | "pos": [
144 | 100,
145 | 0
146 | ],
147 | "size": [
148 | 460,
149 | 222
150 | ],
151 | "flags": {},
152 | "order": 1,
153 | "mode": 0,
154 | "inputs": [],
155 | "outputs": [
156 | {
157 | "name": "IMAGE",
158 | "type": "IMAGE",
159 | "links": [
160 | 25
161 | ],
162 | "slot_index": 0
163 | }
164 | ],
165 | "properties": {
166 | "Node name for S&R": "Amazon Bedrock - Stability AI Models | Text to Image"
167 | },
168 | "widgets_values": [
169 | "Stable_Image_Ultra",
170 | "retro anime, akira",
171 | "1024 x 1024",
172 | 3700475971,
173 | "fixed",
174 | "bad quality, blurry"
175 | ]
176 | }
177 | ],
178 | "links": [
179 | [
180 | 25,
181 | 18,
182 | 0,
183 | 2,
184 | 0,
185 | "IMAGE"
186 | ],
187 | [
188 | 27,
189 | 17,
190 | 0,
191 | 19,
192 | 0,
193 | "IMAGE"
194 | ],
195 | [
196 | 29,
197 | 19,
198 | 0,
199 | 20,
200 | 0,
201 | "IMAGE"
202 | ]
203 | ],
204 | "groups": [],
205 | "config": {},
206 | "extra": {
207 | "ds": {
208 | "scale": 1,
209 | "offset": {
210 | "0": 367.79296875,
211 | "1": 77.46484375
212 | }
213 | },
214 | "ue_links": []
215 | },
216 | "version": 0.4
217 | }
--------------------------------------------------------------------------------
/workflows/generate_image_variation.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 51,
3 | "last_link_id": 69,
4 | "nodes": [
5 | {
6 | "id": 40,
7 | "type": "PreviewImage",
8 | "pos": [
9 | 359,
10 | 818
11 | ],
12 | "size": {
13 | "0": 367.423583984375,
14 | "1": 246
15 | },
16 | "flags": {},
17 | "order": 2,
18 | "mode": 0,
19 | "inputs": [
20 | {
21 | "name": "images",
22 | "type": "IMAGE",
23 | "link": 53
24 | }
25 | ],
26 | "properties": {
27 | "Node name for S&R": "PreviewImage"
28 | }
29 | },
30 | {
31 | "id": 23,
32 | "type": "Prompt Template",
33 | "pos": [
34 | 791.1583716027051,
35 | 404.251692350905
36 | ],
37 | "size": {
38 | "0": 380.399658203125,
39 | "1": 92.37116241455078
40 | },
41 | "flags": {},
42 | "order": 3,
43 | "mode": 0,
44 | "inputs": [
45 | {
46 | "name": "prompt",
47 | "type": "STRING",
48 | "link": 17,
49 | "widget": {
50 | "name": "prompt"
51 | }
52 | }
53 | ],
54 | "outputs": [
55 | {
56 | "name": "STRING",
57 | "type": "STRING",
58 | "links": [
59 | 45
60 | ],
61 | "shape": 3,
62 | "slot_index": 0
63 | }
64 | ],
65 | "properties": {
66 | "Node name for S&R": "Prompt Template"
67 | },
68 | "widgets_values": [
69 | "",
70 | "Describe a beautiful picture in detail using one sentence of \"[prompt]\", please answer in English words only, skip the preamble."
71 | ]
72 | },
73 | {
74 | "id": 8,
75 | "type": "ShowText|pysssss",
76 | "pos": [
77 | 797.1583716027051,
78 | 739.2516923509042
79 | ],
80 | "size": {
81 | "0": 370,
82 | "1": 100
83 | },
84 | "flags": {},
85 | "order": 5,
86 | "mode": 0,
87 | "inputs": [
88 | {
89 | "name": "text",
90 | "type": "STRING",
91 | "link": 46,
92 | "widget": {
93 | "name": "text"
94 | }
95 | }
96 | ],
97 | "outputs": [
98 | {
99 | "name": "STRING",
100 | "type": "STRING",
101 | "links": null,
102 | "shape": 6,
103 | "slot_index": 0
104 | }
105 | ],
106 | "properties": {
107 | "Node name for S&R": "ShowText|pysssss"
108 | },
109 | "widgets_values": [
110 | "",
111 | "A serene winter landscape with a fluffy feline companion adorned in a festive red bow, gazing contentedly at the glistening snow-covered trees and twinkling lights that adorn the cozy cabin in the distance."
112 | ]
113 | },
114 | {
115 | "id": 39,
116 | "type": "Image From URL",
117 | "pos": [
118 | 374,
119 | 662
120 | ],
121 | "size": {
122 | "0": 339.4082336425781,
123 | "1": 58
124 | },
125 | "flags": {},
126 | "order": 0,
127 | "mode": 0,
128 | "outputs": [
129 | {
130 | "name": "IMAGE",
131 | "type": "IMAGE",
132 | "links": [
133 | 53,
134 | 63
135 | ],
136 | "shape": 3,
137 | "slot_index": 0
138 | }
139 | ],
140 | "title": "Image to be Edit From URL",
141 | "properties": {
142 | "Node name for S&R": "Image From URL"
143 | },
144 | "widgets_values": [
145 | "https://github.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/blob/main/assets/cat.png?raw=true"
146 | ]
147 | },
148 | {
149 | "id": 49,
150 | "type": "Bedrock - Titan Variation",
151 | "pos": [
152 | 1229.2916318058303,
153 | 406.2239222923436
154 | ],
155 | "size": {
156 | "0": 610.4379272460938,
157 | "1": 192
158 | },
159 | "flags": {},
160 | "order": 6,
161 | "mode": 0,
162 | "inputs": [
163 | {
164 | "name": "image",
165 | "type": "IMAGE",
166 | "link": 63
167 | },
168 | {
169 | "name": "prompt",
170 | "type": "STRING",
171 | "link": 67,
172 | "widget": {
173 | "name": "prompt"
174 | }
175 | }
176 | ],
177 | "outputs": [
178 | {
179 | "name": "IMAGE",
180 | "type": "IMAGE",
181 | "links": [
182 | 64
183 | ],
184 | "shape": 3,
185 | "slot_index": 0
186 | }
187 | ],
188 | "properties": {
189 | "Node name for S&R": "Bedrock - Titan Variation"
190 | },
191 | "widgets_values": [
192 | "",
193 | "",
194 | 0.7,
195 | 4,
196 | 8,
197 | "1024 x 1024"
198 | ]
199 | },
200 | {
201 | "id": 33,
202 | "type": "PreviewImage",
203 | "pos": [
204 | 1233.2916318058303,
205 | 649.2239222923433
206 | ],
207 | "size": {
208 | "0": 609.0910034179688,
209 | "1": 642.6731567382812
210 | },
211 | "flags": {},
212 | "order": 7,
213 | "mode": 0,
214 | "inputs": [
215 | {
216 | "name": "images",
217 | "type": "IMAGE",
218 | "link": 64
219 | }
220 | ],
221 | "properties": {
222 | "Node name for S&R": "PreviewImage"
223 | }
224 | },
225 | {
226 | "id": 34,
227 | "type": "Bedrock - Claude",
228 | "pos": [
229 | 789.1583716027051,
230 | 538.2516923509042
231 | ],
232 | "size": {
233 | "0": 378.8060302734375,
234 | "1": 154.25772094726562
235 | },
236 | "flags": {},
237 | "order": 4,
238 | "mode": 0,
239 | "inputs": [
240 | {
241 | "name": "prompt",
242 | "type": "STRING",
243 | "link": 45,
244 | "widget": {
245 | "name": "prompt"
246 | }
247 | }
248 | ],
249 | "outputs": [
250 | {
251 | "name": "STRING",
252 | "type": "STRING",
253 | "links": [
254 | 46,
255 | 67
256 | ],
257 | "shape": 3,
258 | "slot_index": 0
259 | }
260 | ],
261 | "properties": {
262 | "Node name for S&R": "Bedrock - Claude"
263 | },
264 | "widgets_values": [
265 | "",
266 | "anthropic.claude-3-haiku-20240307-v1:0",
267 | 200,
268 | 0.5,
269 | 1,
270 | 250
271 | ]
272 | },
273 | {
274 | "id": 5,
275 | "type": "Text Multiline",
276 | "pos": [
277 | 362,
278 | 368
279 | ],
280 | "size": {
281 | "0": 379.65625,
282 | "1": 198.87109375
283 | },
284 | "flags": {},
285 | "order": 1,
286 | "mode": 0,
287 | "outputs": [
288 | {
289 | "name": "STRING",
290 | "type": "STRING",
291 | "links": [
292 | 17
293 | ],
294 | "shape": 3,
295 | "slot_index": 0
296 | }
297 | ],
298 | "title": "Image Editing Prompt",
299 | "properties": {
300 | "Node name for S&R": "Text Multiline"
301 | },
302 | "widgets_values": [
303 | "christmas cat"
304 | ]
305 | }
306 | ],
307 | "links": [
308 | [
309 | 17,
310 | 5,
311 | 0,
312 | 23,
313 | 0,
314 | "STRING"
315 | ],
316 | [
317 | 45,
318 | 23,
319 | 0,
320 | 34,
321 | 0,
322 | "STRING"
323 | ],
324 | [
325 | 46,
326 | 34,
327 | 0,
328 | 8,
329 | 0,
330 | "STRING"
331 | ],
332 | [
333 | 53,
334 | 39,
335 | 0,
336 | 40,
337 | 0,
338 | "IMAGE"
339 | ],
340 | [
341 | 63,
342 | 39,
343 | 0,
344 | 49,
345 | 0,
346 | "IMAGE"
347 | ],
348 | [
349 | 64,
350 | 49,
351 | 0,
352 | 33,
353 | 0,
354 | "IMAGE"
355 | ],
356 | [
357 | 67,
358 | 34,
359 | 0,
360 | 49,
361 | 1,
362 | "STRING"
363 | ]
364 | ],
365 | "groups": [
366 | {
367 | "title": "Prompt Refinement (Bedrock Claude)",
368 | "bounding": [
369 | 779,
370 | 330,
371 | 406,
372 | 521
373 | ],
374 | "color": "#b58b2a",
375 | "font_size": 24,
376 | "locked": false
377 | },
378 | {
379 | "title": "Generate Image Variations (Bedrock TitanImage)",
380 | "bounding": [
381 | 1221,
382 | 328,
383 | 637,
384 | 1012
385 | ],
386 | "color": "#88A",
387 | "font_size": 24,
388 | "locked": false
389 | }
390 | ],
391 | "config": {},
392 | "extra": {
393 | "workspace_info": {
394 | "id": "c350a89c-0edd-4bef-9fc6-f83c239228a8"
395 | },
396 | "ds": {
397 | "scale": 0.6934334949441409,
398 | "offset": [
399 | -177.90243583055275,
400 | -224.28232740460032
401 | ]
402 | }
403 | },
404 | "version": 0.4
405 | }
--------------------------------------------------------------------------------
/workflows/img2vid_nova_reel.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 24,
3 | "last_link_id": 29,
4 | "nodes": [
5 | {
6 | "id": 9,
7 | "type": "PreviewImage",
8 | "pos": [
9 | 1080,
10 | 600
11 | ],
12 | "size": [
13 | 340,
14 | 246
15 | ],
16 | "flags": {},
17 | "order": 5,
18 | "mode": 0,
19 | "inputs": [
20 | {
21 | "name": "images",
22 | "type": "IMAGE",
23 | "link": 8
24 | }
25 | ],
26 | "outputs": [],
27 | "properties": {
28 | "Node name for S&R": "PreviewImage"
29 | },
30 | "widgets_values": []
31 | },
32 | {
33 | "id": 7,
34 | "type": "ShowText|pysssss",
35 | "pos": [
36 | 1080,
37 | 140
38 | ],
39 | "size": [
40 | 340,
41 | 100
42 | ],
43 | "flags": {},
44 | "order": 3,
45 | "mode": 0,
46 | "inputs": [
47 | {
48 | "name": "text",
49 | "type": "STRING",
50 | "link": 17,
51 | "widget": {
52 | "name": "text"
53 | }
54 | }
55 | ],
56 | "outputs": [
57 | {
58 | "name": "STRING",
59 | "type": "STRING",
60 | "links": [
61 | 12
62 | ],
63 | "slot_index": 0,
64 | "shape": 6
65 | }
66 | ],
67 | "properties": {
68 | "Node name for S&R": "ShowText|pysssss"
69 | },
70 | "widgets_values": [
71 | "",
72 | "/home/user/ComfyUI/output/2025-02-16_16-02-24_ygf408e6glpz/ygf408e6glpz.mp4"
73 | ]
74 | },
75 | {
76 | "id": 8,
77 | "type": "VHS_LoadVideoPath",
78 | "pos": [
79 | 1080,
80 | 300
81 | ],
82 | "size": [
83 | 340,
84 | 238
85 | ],
86 | "flags": {},
87 | "order": 4,
88 | "mode": 0,
89 | "inputs": [
90 | {
91 | "name": "meta_batch",
92 | "type": "VHS_BatchManager",
93 | "link": null,
94 | "shape": 7
95 | },
96 | {
97 | "name": "vae",
98 | "type": "VAE",
99 | "link": null,
100 | "shape": 7
101 | },
102 | {
103 | "name": "video",
104 | "type": "STRING",
105 | "link": 12,
106 | "widget": {
107 | "name": "video"
108 | }
109 | }
110 | ],
111 | "outputs": [
112 | {
113 | "name": "IMAGE",
114 | "type": "IMAGE",
115 | "links": [
116 | 8,
117 | 11
118 | ],
119 | "slot_index": 0
120 | },
121 | {
122 | "name": "frame_count",
123 | "type": "INT",
124 | "links": [],
125 | "slot_index": 1
126 | },
127 | {
128 | "name": "audio",
129 | "type": "AUDIO",
130 | "links": null
131 | },
132 | {
133 | "name": "video_info",
134 | "type": "VHS_VIDEOINFO",
135 | "links": null
136 | }
137 | ],
138 | "title": "Load Video (Path) - VHS",
139 | "properties": {
140 | "Node name for S&R": "VHS_LoadVideoPath"
141 | },
142 | "widgets_values": {
143 | "video": "",
144 | "force_rate": 0,
145 | "force_size": "Disabled",
146 | "custom_width": 512,
147 | "custom_height": 512,
148 | "frame_load_cap": 0,
149 | "skip_first_frames": 0,
150 | "select_every_nth": 1,
151 | "videopreview": {
152 | "hidden": false,
153 | "paused": true,
154 | "params": {
155 | "force_rate": 0,
156 | "frame_load_cap": 0,
157 | "skip_first_frames": 0,
158 | "select_every_nth": 1,
159 | "filename": "",
160 | "type": "path",
161 | "format": "video/"
162 | },
163 | "muted": false
164 | }
165 | }
166 | },
167 | {
168 | "id": 11,
169 | "type": "VHS_VideoCombine",
170 | "pos": [
171 | 1460,
172 | 140
173 | ],
174 | "size": [
175 | 680,
176 | 695.25
177 | ],
178 | "flags": {},
179 | "order": 6,
180 | "mode": 0,
181 | "inputs": [
182 | {
183 | "name": "images",
184 | "type": "IMAGE",
185 | "link": 11
186 | },
187 | {
188 | "name": "audio",
189 | "type": "AUDIO",
190 | "link": null,
191 | "shape": 7
192 | },
193 | {
194 | "name": "meta_batch",
195 | "type": "VHS_BatchManager",
196 | "link": null,
197 | "shape": 7
198 | },
199 | {
200 | "name": "vae",
201 | "type": "VAE",
202 | "link": null,
203 | "shape": 7
204 | }
205 | ],
206 | "outputs": [
207 | {
208 | "name": "Filenames",
209 | "type": "VHS_FILENAMES",
210 | "links": null
211 | }
212 | ],
213 | "title": "Video Combine - VHS",
214 | "properties": {
215 | "Node name for S&R": "VHS_VideoCombine"
216 | },
217 | "widgets_values": {
218 | "frame_rate": 24,
219 | "loop_count": 0,
220 | "filename_prefix": "luma_",
221 | "format": "video/h264-mp4",
222 | "pix_fmt": "yuv420p",
223 | "crf": 20,
224 | "save_metadata": true,
225 | "pingpong": false,
226 | "save_output": false,
227 | "videopreview": {
228 | "hidden": false,
229 | "paused": false,
230 | "params": {
231 | "filename": "luma__00002.mp4",
232 | "subfolder": "",
233 | "type": "temp",
234 | "format": "video/h264-mp4",
235 | "frame_rate": 24
236 | },
237 | "muted": false
238 | }
239 | }
240 | },
241 | {
242 | "id": 23,
243 | "type": "ImageResizeKJ",
244 | "pos": [
245 | 640,
246 | 500
247 | ],
248 | "size": [
249 | 400,
250 | 238
251 | ],
252 | "flags": {},
253 | "order": 1,
254 | "mode": 0,
255 | "inputs": [
256 | {
257 | "name": "image",
258 | "type": "IMAGE",
259 | "link": 24
260 | },
261 | {
262 | "name": "get_image_size",
263 | "type": "IMAGE",
264 | "link": null,
265 | "shape": 7
266 | },
267 | {
268 | "name": "width_input",
269 | "type": "INT",
270 | "link": null,
271 | "widget": {
272 | "name": "width_input"
273 | },
274 | "shape": 7
275 | },
276 | {
277 | "name": "height_input",
278 | "type": "INT",
279 | "link": null,
280 | "widget": {
281 | "name": "height_input"
282 | },
283 | "shape": 7
284 | }
285 | ],
286 | "outputs": [
287 | {
288 | "name": "IMAGE",
289 | "type": "IMAGE",
290 | "links": [
291 | 28
292 | ],
293 | "slot_index": 0
294 | },
295 | {
296 | "name": "width",
297 | "type": "INT",
298 | "links": [],
299 | "slot_index": 1
300 | },
301 | {
302 | "name": "height",
303 | "type": "INT",
304 | "links": [],
305 | "slot_index": 2
306 | }
307 | ],
308 | "properties": {
309 | "Node name for S&R": "ImageResizeKJ"
310 | },
311 | "widgets_values": [
312 | 1280,
313 | 720,
314 | "nearest-exact",
315 | false,
316 | 2,
317 | 0,
318 | 0,
319 | "disabled"
320 | ]
321 | },
322 | {
323 | "id": 17,
324 | "type": "Amazon Bedrock - Nova Reel Video",
325 | "pos": [
326 | 640,
327 | 140
328 | ],
329 | "size": [
330 | 400,
331 | 300
332 | ],
333 | "flags": {},
334 | "order": 2,
335 | "mode": 0,
336 | "inputs": [
337 | {
338 | "name": "image",
339 | "type": "IMAGE",
340 | "link": 28,
341 | "shape": 7
342 | }
343 | ],
344 | "outputs": [
345 | {
346 | "name": "STRING",
347 | "type": "STRING",
348 | "links": [
349 | 17
350 | ],
351 | "slot_index": 0
352 | }
353 | ],
354 | "properties": {
355 | "Node name for S&R": "Amazon Bedrock - Nova Reel Video"
356 | },
357 | "widgets_values": [
358 | "A high-contrast, mature anime sequence of Yuji Itadori, explicitly styled like Ghost in the Shell 1995 anime. His pink hair and strong physique rendered in that iconic 90s anime cel-shading technique with sharp shadows and detailed muscle definition. @style classic 90s anime, Ghost in the Shell aesthetic with muted colors and dramatic shadow planes. His cursed energy appears as ethereal, digital glitches similar to the Major's thermoptic camouflage effects.",
359 | "1280x720",
360 | 259062277,
361 | "randomize",
362 | "comfyui-workflow-outputs"
363 | ]
364 | },
365 | {
366 | "id": 18,
367 | "type": "LoadImage",
368 | "pos": [
369 | 200,
370 | 140
371 | ],
372 | "size": [
373 | 400,
374 | 314
375 | ],
376 | "flags": {},
377 | "order": 0,
378 | "mode": 0,
379 | "inputs": [],
380 | "outputs": [
381 | {
382 | "name": "IMAGE",
383 | "type": "IMAGE",
384 | "links": [
385 | 24
386 | ],
387 | "slot_index": 0
388 | },
389 | {
390 | "name": "MASK",
391 | "type": "MASK",
392 | "links": null
393 | }
394 | ],
395 | "properties": {
396 | "Node name for S&R": "LoadImage"
397 | },
398 | "widgets_values": [
399 | "anime_brute_force.jpg",
400 | "image"
401 | ]
402 | }
403 | ],
404 | "links": [
405 | [
406 | 8,
407 | 8,
408 | 0,
409 | 9,
410 | 0,
411 | "IMAGE"
412 | ],
413 | [
414 | 11,
415 | 8,
416 | 0,
417 | 11,
418 | 0,
419 | "IMAGE"
420 | ],
421 | [
422 | 12,
423 | 7,
424 | 0,
425 | 8,
426 | 2,
427 | "STRING"
428 | ],
429 | [
430 | 17,
431 | 17,
432 | 0,
433 | 7,
434 | 0,
435 | "STRING"
436 | ],
437 | [
438 | 24,
439 | 18,
440 | 0,
441 | 23,
442 | 0,
443 | "IMAGE"
444 | ],
445 | [
446 | 28,
447 | 23,
448 | 0,
449 | 17,
450 | 0,
451 | "IMAGE"
452 | ]
453 | ],
454 | "groups": [],
455 | "config": {},
456 | "extra": {
457 | "ds": {
458 | "scale": 1,
459 | "offset": {
460 | "0": -334.8046875,
461 | "1": -3.74609375
462 | }
463 | },
464 | "ue_links": []
465 | },
466 | "version": 0.4
467 | }
--------------------------------------------------------------------------------
/workflows/inpainting_with_natural_language.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 49,
3 | "last_link_id": 63,
4 | "nodes": [
5 | {
6 | "id": 8,
7 | "type": "ShowText|pysssss",
8 | "pos": [
9 | 824,
10 | 904.4087612092363
11 | ],
12 | "size": {
13 | "0": 367.124755859375,
14 | "1": 159.00286865234375
15 | },
16 | "flags": {},
17 | "order": 5,
18 | "mode": 0,
19 | "inputs": [
20 | {
21 | "name": "text",
22 | "type": "STRING",
23 | "link": 46,
24 | "widget": {
25 | "name": "text"
26 | }
27 | }
28 | ],
29 | "outputs": [
30 | {
31 | "name": "STRING",
32 | "type": "STRING",
33 | "links": null,
34 | "shape": 6,
35 | "slot_index": 0
36 | }
37 | ],
38 | "properties": {
39 | "Node name for S&R": "ShowText|pysssss"
40 | },
41 | "widgets_values": [
42 | "",
43 | "{\n \"to_replace\": \"duck\",\n \"replace_with\": \"flying eagle\"\n}"
44 | ]
45 | },
46 | {
47 | "id": 23,
48 | "type": "Prompt Template",
49 | "pos": [
50 | 820.4378327684955,
51 | 369.50059010150466
52 | ],
53 | "size": {
54 | "0": 376.08880615234375,
55 | "1": 212.24671936035156
56 | },
57 | "flags": {},
58 | "order": 2,
59 | "mode": 0,
60 | "inputs": [
61 | {
62 | "name": "prompt",
63 | "type": "STRING",
64 | "link": 17,
65 | "widget": {
66 | "name": "prompt"
67 | }
68 | }
69 | ],
70 | "outputs": [
71 | {
72 | "name": "STRING",
73 | "type": "STRING",
74 | "links": [
75 | 45
76 | ],
77 | "shape": 3,
78 | "slot_index": 0
79 | }
80 | ],
81 | "properties": {
82 | "Node name for S&R": "Prompt Template"
83 | },
84 | "widgets_values": [
85 | "",
86 | "You’re an image edit prompt analyser. Analyze the prompt \"[prompt]\" and output in JSON format with keys: “to_replace” (string), “replace_with” (string)."
87 | ]
88 | },
89 | {
90 | "id": 34,
91 | "type": "Bedrock - Claude",
92 | "pos": [
93 | 825,
94 | 632.4087612092363
95 | ],
96 | "size": {
97 | "0": 364.671875,
98 | "1": 196
99 | },
100 | "flags": {},
101 | "order": 4,
102 | "mode": 0,
103 | "inputs": [
104 | {
105 | "name": "prompt",
106 | "type": "STRING",
107 | "link": 45,
108 | "widget": {
109 | "name": "prompt"
110 | }
111 | }
112 | ],
113 | "outputs": [
114 | {
115 | "name": "STRING",
116 | "type": "STRING",
117 | "links": [
118 | 46,
119 | 57
120 | ],
121 | "shape": 3,
122 | "slot_index": 0
123 | }
124 | ],
125 | "properties": {
126 | "Node name for S&R": "Bedrock - Claude"
127 | },
128 | "widgets_values": [
129 | "",
130 | "anthropic.claude-3-haiku-20240307-v1:0",
131 | 200,
132 | 0.5,
133 | 1,
134 | 250
135 | ]
136 | },
137 | {
138 | "id": 45,
139 | "type": "JSON Text Extraction",
140 | "pos": [
141 | 1257,
142 | 375
143 | ],
144 | "size": {
145 | "0": 315,
146 | "1": 214
147 | },
148 | "flags": {},
149 | "order": 6,
150 | "mode": 0,
151 | "inputs": [
152 | {
153 | "name": "json_text",
154 | "type": "STRING",
155 | "link": 57,
156 | "widget": {
157 | "name": "json_text"
158 | }
159 | }
160 | ],
161 | "outputs": [
162 | {
163 | "name": "STRING",
164 | "type": "STRING",
165 | "links": [
166 | 58,
167 | 60
168 | ],
169 | "shape": 3,
170 | "slot_index": 0
171 | },
172 | {
173 | "name": "STRING",
174 | "type": "STRING",
175 | "links": [
176 | 59,
177 | 61
178 | ],
179 | "shape": 3,
180 | "slot_index": 1
181 | },
182 | {
183 | "name": "STRING",
184 | "type": "STRING",
185 | "links": null,
186 | "shape": 3
187 | },
188 | {
189 | "name": "STRING",
190 | "type": "STRING",
191 | "links": null,
192 | "shape": 3
193 | }
194 | ],
195 | "properties": {
196 | "Node name for S&R": "JSON Text Extraction"
197 | },
198 | "widgets_values": [
199 | "",
200 | "to_replace",
201 | "replace_with",
202 | "",
203 | ""
204 | ]
205 | },
206 | {
207 | "id": 46,
208 | "type": "ShowText|pysssss",
209 | "pos": [
210 | 1259,
211 | 693
212 | ],
213 | "size": {
214 | "0": 315,
215 | "1": 76
216 | },
217 | "flags": {},
218 | "order": 7,
219 | "mode": 0,
220 | "inputs": [
221 | {
222 | "name": "text",
223 | "type": "STRING",
224 | "link": 58,
225 | "widget": {
226 | "name": "text"
227 | }
228 | }
229 | ],
230 | "outputs": [
231 | {
232 | "name": "STRING",
233 | "type": "STRING",
234 | "links": null,
235 | "shape": 6
236 | }
237 | ],
238 | "properties": {
239 | "Node name for S&R": "ShowText|pysssss"
240 | },
241 | "widgets_values": [
242 | "",
243 | "duck"
244 | ]
245 | },
246 | {
247 | "id": 42,
248 | "type": "Bedrock - Titan Inpainting",
249 | "pos": [
250 | 1658,
251 | 370
252 | ],
253 | "size": {
254 | "0": 539.52294921875,
255 | "1": 263.39349365234375
256 | },
257 | "flags": {},
258 | "order": 9,
259 | "mode": 0,
260 | "inputs": [
261 | {
262 | "name": "image",
263 | "type": "IMAGE",
264 | "link": 62
265 | },
266 | {
267 | "name": "mask_image",
268 | "type": "IMAGE",
269 | "link": null
270 | },
271 | {
272 | "name": "prompt",
273 | "type": "STRING",
274 | "link": 61,
275 | "widget": {
276 | "name": "prompt"
277 | }
278 | },
279 | {
280 | "name": "mask_prompt",
281 | "type": "STRING",
282 | "link": 60,
283 | "widget": {
284 | "name": "mask_prompt"
285 | }
286 | }
287 | ],
288 | "outputs": [
289 | {
290 | "name": "IMAGE",
291 | "type": "IMAGE",
292 | "links": [
293 | 55
294 | ],
295 | "shape": 3,
296 | "slot_index": 0
297 | }
298 | ],
299 | "properties": {
300 | "Node name for S&R": "Bedrock - Titan Inpainting"
301 | },
302 | "widgets_values": [
303 | "",
304 | "",
305 | "duck",
306 | 2,
307 | 8,
308 | "1024 x 1024"
309 | ]
310 | },
311 | {
312 | "id": 33,
313 | "type": "PreviewImage",
314 | "pos": [
315 | 1663,
316 | 689
317 | ],
318 | "size": {
319 | "0": 530.0150756835938,
320 | "1": 404.49725341796875
321 | },
322 | "flags": {},
323 | "order": 10,
324 | "mode": 0,
325 | "inputs": [
326 | {
327 | "name": "images",
328 | "type": "IMAGE",
329 | "link": 55
330 | }
331 | ],
332 | "properties": {
333 | "Node name for S&R": "PreviewImage"
334 | }
335 | },
336 | {
337 | "id": 47,
338 | "type": "ShowText|pysssss",
339 | "pos": [
340 | 1263,
341 | 877
342 | ],
343 | "size": {
344 | "0": 315,
345 | "1": 76
346 | },
347 | "flags": {},
348 | "order": 8,
349 | "mode": 0,
350 | "inputs": [
351 | {
352 | "name": "text",
353 | "type": "STRING",
354 | "link": 59,
355 | "widget": {
356 | "name": "text"
357 | }
358 | }
359 | ],
360 | "outputs": [
361 | {
362 | "name": "STRING",
363 | "type": "STRING",
364 | "links": null,
365 | "shape": 6
366 | }
367 | ],
368 | "properties": {
369 | "Node name for S&R": "ShowText|pysssss"
370 | },
371 | "widgets_values": [
372 | "",
373 | "flying eagle"
374 | ]
375 | },
376 | {
377 | "id": 5,
378 | "type": "Text Multiline",
379 | "pos": [
380 | 400,
381 | 370
382 | ],
383 | "size": {
384 | "0": 379.65625,
385 | "1": 198.87109375
386 | },
387 | "flags": {},
388 | "order": 0,
389 | "mode": 0,
390 | "outputs": [
391 | {
392 | "name": "STRING",
393 | "type": "STRING",
394 | "links": [
395 | 17
396 | ],
397 | "shape": 3,
398 | "slot_index": 0
399 | }
400 | ],
401 | "title": "Image Editing Prompt",
402 | "properties": {
403 | "Node name for S&R": "Text Multiline"
404 | },
405 | "widgets_values": [
406 | "replace the duck with a flying eagle"
407 | ]
408 | },
409 | {
410 | "id": 49,
411 | "type": "PreviewImage",
412 | "pos": [
413 | 414,
414 | 814
415 | ],
416 | "size": [
417 | 351.83990478515625,
418 | 246
419 | ],
420 | "flags": {},
421 | "order": 3,
422 | "mode": 0,
423 | "inputs": [
424 | {
425 | "name": "images",
426 | "type": "IMAGE",
427 | "link": 63
428 | }
429 | ],
430 | "properties": {
431 | "Node name for S&R": "PreviewImage"
432 | }
433 | },
434 | {
435 | "id": 48,
436 | "type": "Image From URL",
437 | "pos": [
438 | 430,
439 | 650
440 | ],
441 | "size": {
442 | "0": 315,
443 | "1": 58
444 | },
445 | "flags": {},
446 | "order": 1,
447 | "mode": 0,
448 | "outputs": [
449 | {
450 | "name": "IMAGE",
451 | "type": "IMAGE",
452 | "links": [
453 | 62,
454 | 63
455 | ],
456 | "shape": 3,
457 | "slot_index": 0
458 | }
459 | ],
460 | "title": "Image to Edit from URL",
461 | "properties": {
462 | "Node name for S&R": "Image From URL"
463 | },
464 | "widgets_values": [
465 | "https://raw.githubusercontent.com/aws-samples/comfyui-llm-node-for-amazon-bedrock/540c07d564fd4d2d2523e752c2fdbdf9d9e874ac/assets/flying_duck.png"
466 | ]
467 | }
468 | ],
469 | "links": [
470 | [
471 | 17,
472 | 5,
473 | 0,
474 | 23,
475 | 0,
476 | "STRING"
477 | ],
478 | [
479 | 45,
480 | 23,
481 | 0,
482 | 34,
483 | 0,
484 | "STRING"
485 | ],
486 | [
487 | 46,
488 | 34,
489 | 0,
490 | 8,
491 | 0,
492 | "STRING"
493 | ],
494 | [
495 | 55,
496 | 42,
497 | 0,
498 | 33,
499 | 0,
500 | "IMAGE"
501 | ],
502 | [
503 | 57,
504 | 34,
505 | 0,
506 | 45,
507 | 0,
508 | "STRING"
509 | ],
510 | [
511 | 58,
512 | 45,
513 | 0,
514 | 46,
515 | 0,
516 | "STRING"
517 | ],
518 | [
519 | 59,
520 | 45,
521 | 1,
522 | 47,
523 | 0,
524 | "STRING"
525 | ],
526 | [
527 | 60,
528 | 45,
529 | 0,
530 | 42,
531 | 3,
532 | "STRING"
533 | ],
534 | [
535 | 61,
536 | 45,
537 | 1,
538 | 42,
539 | 2,
540 | "STRING"
541 | ],
542 | [
543 | 62,
544 | 48,
545 | 0,
546 | 42,
547 | 0,
548 | "IMAGE"
549 | ],
550 | [
551 | 63,
552 | 48,
553 | 0,
554 | 49,
555 | 0,
556 | "IMAGE"
557 | ]
558 | ],
559 | "groups": [
560 | {
561 | "title": "Convert Text to Inputs (Bedrock Claude)",
562 | "bounding": [
563 | 810,
564 | 295,
565 | 804,
566 | 817
567 | ],
568 | "color": "#b58b2a",
569 | "font_size": 24,
570 | "locked": false
571 | },
572 | {
573 | "title": "Object Detection and Inpainting(Bedrock TitanImage)",
574 | "bounding": [
575 | 1647,
576 | 294,
577 | 575,
578 | 822
579 | ],
580 | "color": "#88A",
581 | "font_size": 24,
582 | "locked": false
583 | }
584 | ],
585 | "config": {},
586 | "extra": {
587 | "workspace_info": {
588 | "id": "c350a89c-0edd-4bef-9fc6-f83c239228a8"
589 | },
590 | "ds": {
591 | "scale": 0.6727499949325618,
592 | "offset": [
593 | -337.0030387848718,
594 | -99.23183611701575
595 | ]
596 | }
597 | },
598 | "version": 0.4
599 | }
--------------------------------------------------------------------------------
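In this workflow, Claude turns the free-form request ("replace the duck with a flying eagle") into the structured reply {"to_replace": "duck", "replace_with": "flying eagle"}, and the JSON Text Extraction node splits those two fields into the mask_prompt and prompt inputs of Bedrock - Titan Inpainting. A rough Python stand-in for that extraction step (extract_keys is a hypothetical helper, not the node's actual code):

import json
import re

def extract_keys(model_reply, keys=("to_replace", "replace_with")):
    # Find the first JSON object in the reply, tolerating surrounding text,
    # then pull out the requested string fields.
    match = re.search(r"\{.*\}", model_reply, re.DOTALL)
    if match is None:
        raise ValueError("no JSON object found in model reply")
    data = json.loads(match.group(0))
    return tuple(data.get(k, "") for k in keys)

reply = '{\n  "to_replace": "duck",\n  "replace_with": "flying eagle"\n}'
mask_prompt, prompt = extract_keys(reply)
print(mask_prompt, "->", prompt)  # duck -> flying eagle
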
/workflows/lama.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 35,
3 | "last_link_id": 71,
4 | "nodes": [
5 | {
6 | "id": 15,
7 | "type": "PreviewImage",
8 | "pos": [
9 | 1680,
10 | 650
11 | ],
12 | "size": {
13 | "0": 295.9647216796875,
14 | "1": 320.621337890625
15 | },
16 | "flags": {},
17 | "order": 11,
18 | "mode": 0,
19 | "inputs": [
20 | {
21 | "name": "images",
22 | "type": "IMAGE",
23 | "link": 23
24 | }
25 | ],
26 | "properties": {
27 | "Node name for S&R": "PreviewImage"
28 | }
29 | },
30 | {
31 | "id": 29,
32 | "type": "PreviewImage",
33 | "pos": [
34 | 1680,
35 | 1040
36 | ],
37 | "size": {
38 | "0": 295.9647216796875,
39 | "1": 320.621337890625
40 | },
41 | "flags": {},
42 | "order": 12,
43 | "mode": 0,
44 | "inputs": [
45 | {
46 | "name": "images",
47 | "type": "IMAGE",
48 | "link": 62
49 | }
50 | ],
51 | "properties": {
52 | "Node name for S&R": "PreviewImage"
53 | }
54 | },
55 | {
56 | "id": 8,
57 | "type": "PreviewImage",
58 | "pos": [
59 | 1680,
60 | 230
61 | ],
62 | "size": {
63 | "0": 299.51416015625,
64 | "1": 327.3353271484375
65 | },
66 | "flags": {},
67 | "order": 10,
68 | "mode": 0,
69 | "inputs": [
70 | {
71 | "name": "images",
72 | "type": "IMAGE",
73 | "link": 8
74 | }
75 | ],
76 | "properties": {
77 | "Node name for S&R": "PreviewImage"
78 | }
79 | },
80 | {
81 | "id": 31,
82 | "type": "PreviewImage",
83 | "pos": [
84 | 2410,
85 | 240
86 | ],
87 | "size": {
88 | "0": 299.51416015625,
89 | "1": 327.3353271484375
90 | },
91 | "flags": {},
92 | "order": 13,
93 | "mode": 0,
94 | "inputs": [
95 | {
96 | "name": "images",
97 | "type": "IMAGE",
98 | "link": 65
99 | }
100 | ],
101 | "properties": {
102 | "Node name for S&R": "PreviewImage"
103 | }
104 | },
105 | {
106 | "id": 33,
107 | "type": "PreviewImage",
108 | "pos": [
109 | 2410,
110 | 630
111 | ],
112 | "size": {
113 | "0": 299.51416015625,
114 | "1": 327.3353271484375
115 | },
116 | "flags": {},
117 | "order": 14,
118 | "mode": 0,
119 | "inputs": [
120 | {
121 | "name": "images",
122 | "type": "IMAGE",
123 | "link": 68
124 | }
125 | ],
126 | "properties": {
127 | "Node name for S&R": "PreviewImage"
128 | }
129 | },
130 | {
131 | "id": 35,
132 | "type": "PreviewImage",
133 | "pos": [
134 | 2420,
135 | 1020
136 | ],
137 | "size": {
138 | "0": 299.51416015625,
139 | "1": 327.3353271484375
140 | },
141 | "flags": {},
142 | "order": 15,
143 | "mode": 0,
144 | "inputs": [
145 | {
146 | "name": "images",
147 | "type": "IMAGE",
148 | "link": 71
149 | }
150 | ],
151 | "properties": {
152 | "Node name for S&R": "PreviewImage"
153 | }
154 | },
155 | {
156 | "id": 25,
157 | "type": "Reroute",
158 | "pos": [
159 | 1132,
160 | 727
161 | ],
162 | "size": [
163 | 75,
164 | 26
165 | ],
166 | "flags": {},
167 | "order": 3,
168 | "mode": 0,
169 | "inputs": [
170 | {
171 | "name": "",
172 | "type": "*",
173 | "link": 54
174 | }
175 | ],
176 | "outputs": [
177 | {
178 | "name": "",
179 | "type": "MASK",
180 | "links": [
181 | 58,
182 | 59,
183 | 61,
184 | 64,
185 | 67,
186 | 70
187 | ],
188 | "slot_index": 0
189 | }
190 | ],
191 | "properties": {
192 | "showOutputText": false,
193 | "horizontal": false
194 | }
195 | },
196 | {
197 | "id": 27,
198 | "type": "Reroute",
199 | "pos": [
200 | 1129,
201 | 439
202 | ],
203 | "size": [
204 | 75,
205 | 26
206 | ],
207 | "flags": {},
208 | "order": 2,
209 | "mode": 0,
210 | "inputs": [
211 | {
212 | "name": "",
213 | "type": "*",
214 | "link": 55
215 | }
216 | ],
217 | "outputs": [
218 | {
219 | "name": "",
220 | "type": "IMAGE",
221 | "links": [
222 | 56,
223 | 57,
224 | 60,
225 | 63,
226 | 66,
227 | 69
228 | ],
229 | "slot_index": 0
230 | }
231 | ],
232 | "properties": {
233 | "showOutputText": false,
234 | "horizontal": false
235 | }
236 | },
237 | {
238 | "id": 24,
239 | "type": "LayerMask: RemBgUltra",
240 | "pos": [
241 | 785,
242 | 772
243 | ],
244 | "size": {
245 | "0": 315,
246 | "1": 150
247 | },
248 | "flags": {},
249 | "order": 1,
250 | "mode": 0,
251 | "inputs": [
252 | {
253 | "name": "image",
254 | "type": "IMAGE",
255 | "link": 36
256 | }
257 | ],
258 | "outputs": [
259 | {
260 | "name": "image",
261 | "type": "IMAGE",
262 | "links": null,
263 | "shape": 3
264 | },
265 | {
266 | "name": "mask",
267 | "type": "MASK",
268 | "links": [
269 | 54
270 | ],
271 | "shape": 3,
272 | "slot_index": 1
273 | }
274 | ],
275 | "properties": {
276 | "Node name for S&R": "LayerMask: RemBgUltra"
277 | },
278 | "widgets_values": [
279 | 8,
280 | 0.01,
281 | 0.99,
282 | false
283 | ]
284 | },
285 | {
286 | "id": 2,
287 | "type": "LoadImage",
288 | "pos": [
289 | 431,
290 | 439
291 | ],
292 | "size": {
293 | "0": 323.62158203125,
294 | "1": 420.20391845703125
295 | },
296 | "flags": {},
297 | "order": 0,
298 | "mode": 0,
299 | "outputs": [
300 | {
301 | "name": "IMAGE",
302 | "type": "IMAGE",
303 | "links": [
304 | 36,
305 | 55
306 | ],
307 | "shape": 3,
308 | "slot_index": 0
309 | },
310 | {
311 | "name": "MASK",
312 | "type": "MASK",
313 | "links": null,
314 | "shape": 3
315 | }
316 | ],
317 | "properties": {
318 | "Node name for S&R": "LoadImage"
319 | },
320 | "widgets_values": [
321 | "512x512with_background (24).png",
322 | "image"
323 | ]
324 | },
325 | {
326 | "id": 7,
327 | "type": "LayerUtility: LaMa",
328 | "pos": [
329 | 1300,
330 | 320
331 | ],
332 | "size": {
333 | "0": 315,
334 | "1": 174
335 | },
336 | "flags": {},
337 | "order": 4,
338 | "mode": 0,
339 | "inputs": [
340 | {
341 | "name": "image",
342 | "type": "IMAGE",
343 | "link": 56
344 | },
345 | {
346 | "name": "mask",
347 | "type": "MASK",
348 | "link": 58
349 | }
350 | ],
351 | "outputs": [
352 | {
353 | "name": "image",
354 | "type": "IMAGE",
355 | "links": [
356 | 8
357 | ],
358 | "shape": 3,
359 | "slot_index": 0
360 | }
361 | ],
362 | "properties": {
363 | "Node name for S&R": "LayerUtility: LaMa"
364 | },
365 | "widgets_values": [
366 | "lama",
367 | "cuda",
368 | false,
369 | 25,
370 | 6
371 | ]
372 | },
373 | {
374 | "id": 14,
375 | "type": "LayerUtility: LaMa",
376 | "pos": [
377 | 1300,
378 | 710
379 | ],
380 | "size": {
381 | "0": 315,
382 | "1": 174
383 | },
384 | "flags": {},
385 | "order": 5,
386 | "mode": 0,
387 | "inputs": [
388 | {
389 | "name": "image",
390 | "type": "IMAGE",
391 | "link": 57
392 | },
393 | {
394 | "name": "mask",
395 | "type": "MASK",
396 | "link": 59
397 | }
398 | ],
399 | "outputs": [
400 | {
401 | "name": "image",
402 | "type": "IMAGE",
403 | "links": [
404 | 23
405 | ],
406 | "shape": 3,
407 | "slot_index": 0
408 | }
409 | ],
410 | "properties": {
411 | "Node name for S&R": "LayerUtility: LaMa"
412 | },
413 | "widgets_values": [
414 | "ldm",
415 | "cuda",
416 | false,
417 | 25,
418 | 6
419 | ]
420 | },
421 | {
422 | "id": 28,
423 | "type": "LayerUtility: LaMa",
424 | "pos": [
425 | 1290,
426 | 1030
427 | ],
428 | "size": {
429 | "0": 315,
430 | "1": 174
431 | },
432 | "flags": {},
433 | "order": 6,
434 | "mode": 0,
435 | "inputs": [
436 | {
437 | "name": "image",
438 | "type": "IMAGE",
439 | "link": 60
440 | },
441 | {
442 | "name": "mask",
443 | "type": "MASK",
444 | "link": 61
445 | }
446 | ],
447 | "outputs": [
448 | {
449 | "name": "image",
450 | "type": "IMAGE",
451 | "links": [
452 | 62
453 | ],
454 | "shape": 3,
455 | "slot_index": 0
456 | }
457 | ],
458 | "properties": {
459 | "Node name for S&R": "LayerUtility: LaMa"
460 | },
461 | "widgets_values": [
462 | "zits",
463 | "cuda",
464 | false,
465 | 25,
466 | 6
467 | ]
468 | },
469 | {
470 | "id": 30,
471 | "type": "LayerUtility: LaMa",
472 | "pos": [
473 | 2036,
474 | 314
475 | ],
476 | "size": {
477 | "0": 315,
478 | "1": 174
479 | },
480 | "flags": {},
481 | "order": 7,
482 | "mode": 0,
483 | "inputs": [
484 | {
485 | "name": "image",
486 | "type": "IMAGE",
487 | "link": 63
488 | },
489 | {
490 | "name": "mask",
491 | "type": "MASK",
492 | "link": 64
493 | }
494 | ],
495 | "outputs": [
496 | {
497 | "name": "image",
498 | "type": "IMAGE",
499 | "links": [
500 | 65
501 | ],
502 | "shape": 3,
503 | "slot_index": 0
504 | }
505 | ],
506 | "properties": {
507 | "Node name for S&R": "LayerUtility: LaMa"
508 | },
509 | "widgets_values": [
510 | "mat",
511 | "cuda",
512 | false,
513 | 25,
514 | 6
515 | ]
516 | },
517 | {
518 | "id": 32,
519 | "type": "LayerUtility: LaMa",
520 | "pos": [
521 | 2037,
522 | 724
523 | ],
524 | "size": {
525 | "0": 315,
526 | "1": 174
527 | },
528 | "flags": {},
529 | "order": 8,
530 | "mode": 0,
531 | "inputs": [
532 | {
533 | "name": "image",
534 | "type": "IMAGE",
535 | "link": 66
536 | },
537 | {
538 | "name": "mask",
539 | "type": "MASK",
540 | "link": 67
541 | }
542 | ],
543 | "outputs": [
544 | {
545 | "name": "image",
546 | "type": "IMAGE",
547 | "links": [
548 | 68
549 | ],
550 | "shape": 3,
551 | "slot_index": 0
552 | }
553 | ],
554 | "properties": {
555 | "Node name for S&R": "LayerUtility: LaMa"
556 | },
557 | "widgets_values": [
558 | "fcf",
559 | "cuda",
560 | false,
561 | 25,
562 | 6
563 | ]
564 | },
565 | {
566 | "id": 34,
567 | "type": "LayerUtility: LaMa",
568 | "pos": [
569 | 2043,
570 | 1042
571 | ],
572 | "size": {
573 | "0": 315,
574 | "1": 174
575 | },
576 | "flags": {},
577 | "order": 9,
578 | "mode": 0,
579 | "inputs": [
580 | {
581 | "name": "image",
582 | "type": "IMAGE",
583 | "link": 69
584 | },
585 | {
586 | "name": "mask",
587 | "type": "MASK",
588 | "link": 70
589 | }
590 | ],
591 | "outputs": [
592 | {
593 | "name": "image",
594 | "type": "IMAGE",
595 | "links": [
596 | 71
597 | ],
598 | "shape": 3,
599 | "slot_index": 0
600 | }
601 | ],
602 | "properties": {
603 | "Node name for S&R": "LayerUtility: LaMa"
604 | },
605 | "widgets_values": [
606 | "spread",
607 | "cuda",
608 | false,
609 | 25,
610 | 6
611 | ]
612 | }
613 | ],
614 | "links": [
615 | [
616 | 8,
617 | 7,
618 | 0,
619 | 8,
620 | 0,
621 | "IMAGE"
622 | ],
623 | [
624 | 23,
625 | 14,
626 | 0,
627 | 15,
628 | 0,
629 | "IMAGE"
630 | ],
631 | [
632 | 36,
633 | 2,
634 | 0,
635 | 24,
636 | 0,
637 | "IMAGE"
638 | ],
639 | [
640 | 54,
641 | 24,
642 | 1,
643 | 25,
644 | 0,
645 | "*"
646 | ],
647 | [
648 | 55,
649 | 2,
650 | 0,
651 | 27,
652 | 0,
653 | "*"
654 | ],
655 | [
656 | 56,
657 | 27,
658 | 0,
659 | 7,
660 | 0,
661 | "IMAGE"
662 | ],
663 | [
664 | 57,
665 | 27,
666 | 0,
667 | 14,
668 | 0,
669 | "IMAGE"
670 | ],
671 | [
672 | 58,
673 | 25,
674 | 0,
675 | 7,
676 | 1,
677 | "MASK"
678 | ],
679 | [
680 | 59,
681 | 25,
682 | 0,
683 | 14,
684 | 1,
685 | "MASK"
686 | ],
687 | [
688 | 60,
689 | 27,
690 | 0,
691 | 28,
692 | 0,
693 | "IMAGE"
694 | ],
695 | [
696 | 61,
697 | 25,
698 | 0,
699 | 28,
700 | 1,
701 | "MASK"
702 | ],
703 | [
704 | 62,
705 | 28,
706 | 0,
707 | 29,
708 | 0,
709 | "IMAGE"
710 | ],
711 | [
712 | 63,
713 | 27,
714 | 0,
715 | 30,
716 | 0,
717 | "IMAGE"
718 | ],
719 | [
720 | 64,
721 | 25,
722 | 0,
723 | 30,
724 | 1,
725 | "MASK"
726 | ],
727 | [
728 | 65,
729 | 30,
730 | 0,
731 | 31,
732 | 0,
733 | "IMAGE"
734 | ],
735 | [
736 | 66,
737 | 27,
738 | 0,
739 | 32,
740 | 0,
741 | "IMAGE"
742 | ],
743 | [
744 | 67,
745 | 25,
746 | 0,
747 | 32,
748 | 1,
749 | "MASK"
750 | ],
751 | [
752 | 68,
753 | 32,
754 | 0,
755 | 33,
756 | 0,
757 | "IMAGE"
758 | ],
759 | [
760 | 69,
761 | 27,
762 | 0,
763 | 34,
764 | 0,
765 | "IMAGE"
766 | ],
767 | [
768 | 70,
769 | 25,
770 | 0,
771 | 34,
772 | 1,
773 | "MASK"
774 | ],
775 | [
776 | 71,
777 | 34,
778 | 0,
779 | 35,
780 | 0,
781 | "IMAGE"
782 | ]
783 | ],
784 | "groups": [],
785 | "config": {},
786 | "extra": {},
787 | "version": 0.4
788 | }
789 |
--------------------------------------------------------------------------------
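lama.json is a side-by-side comparison: one LoadImage output and one LayerMask: RemBgUltra mask are fanned out through two Reroute nodes to six LayerUtility: LaMa nodes, each configured with a different erase backend. The backend name is the first entry of each node's widgets_values, so the comparison can be read straight out of the file:

import json

with open("workflows/lama.json") as f:
    graph = json.load(f)

# For "LayerUtility: LaMa" the first widget value selects the erase model.
for node in graph["nodes"]:
    if node["type"] == "LayerUtility: LaMa":
        print(f"node {node['id']}: model={node['widgets_values'][0]}")
# Prints one node per backend: lama, ldm, zits, mat, fcf, spread.
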
/workflows/sensitive erase.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 52,
3 | "last_link_id": 83,
4 | "nodes": [
5 | {
6 | "id": 25,
7 | "type": "PreviewImage",
8 | "pos": [
9 | 1789,
10 | 644
11 | ],
12 | "size": {
13 | "0": 210,
14 | "1": 246
15 | },
16 | "flags": {},
17 | "order": 13,
18 | "mode": 0,
19 | "inputs": [
20 | {
21 | "name": "images",
22 | "type": "IMAGE",
23 | "link": 47
24 | }
25 | ],
26 | "properties": {
27 | "Node name for S&R": "PreviewImage"
28 | }
29 | },
30 | {
31 | "id": 23,
32 | "type": "Image OCR By Textract",
33 | "pos": [
34 | -654,
35 | 420
36 | ],
37 | "size": {
38 | "0": 210,
39 | "1": 126
40 | },
41 | "flags": {},
42 | "order": 4,
43 | "mode": 0,
44 | "inputs": [
45 | {
46 | "name": "image",
47 | "type": "IMAGE",
48 | "link": 36
49 | }
50 | ],
51 | "outputs": [
52 | {
53 | "name": "Text",
54 | "type": "STRING",
55 | "links": [
56 | 38
57 | ],
58 | "shape": 3,
59 | "slot_index": 0
60 | },
61 | {
62 | "name": "Left",
63 | "type": "INT",
64 | "links": [],
65 | "shape": 3,
66 | "slot_index": 1
67 | },
68 | {
69 | "name": "Top",
70 | "type": "INT",
71 | "links": [],
72 | "shape": 3,
73 | "slot_index": 2
74 | },
75 | {
76 | "name": "Width",
77 | "type": "INT",
78 | "links": [],
79 | "shape": 3,
80 | "slot_index": 3
81 | },
82 | {
83 | "name": "Height",
84 | "type": "INT",
85 | "links": [],
86 | "shape": 3,
87 | "slot_index": 4
88 | },
89 | {
90 | "name": "Mask Image",
91 | "type": "IMAGE",
92 | "links": [
93 | 49
94 | ],
95 | "shape": 3,
96 | "slot_index": 5
97 | }
98 | ],
99 | "properties": {
100 | "Node name for S&R": "Image OCR By Textract"
101 | }
102 | },
103 | {
104 | "id": 15,
105 | "type": "Bedrock - Claude",
106 | "pos": [
107 | -322,
108 | 948
109 | ],
110 | "size": {
111 | "0": 400,
112 | "1": 200
113 | },
114 | "flags": {},
115 | "order": 8,
116 | "mode": 0,
117 | "inputs": [
118 | {
119 | "name": "prompt",
120 | "type": "STRING",
121 | "link": 30,
122 | "widget": {
123 | "name": "prompt"
124 | }
125 | }
126 | ],
127 | "outputs": [
128 | {
129 | "name": "STRING",
130 | "type": "STRING",
131 | "links": [
132 | 59
133 | ],
134 | "shape": 3,
135 | "slot_index": 0
136 | }
137 | ],
138 | "properties": {
139 | "Node name for S&R": "Bedrock - Claude"
140 | },
141 | "widgets_values": [
142 | "",
143 | "anthropic.claude-3-haiku-20240307-v1:0",
144 | 2048,
145 | 1,
146 | 1,
147 | 71
148 | ]
149 | },
150 | {
151 | "id": 40,
152 | "type": "String",
153 | "pos": [
154 | 347,
155 | 1165
156 | ],
157 | "size": {
158 | "0": 315,
159 | "1": 58
160 | },
161 | "flags": {},
162 | "order": 0,
163 | "mode": 0,
164 | "outputs": [
165 | {
166 | "name": "STRING",
167 | "type": "STRING",
168 | "links": [
169 | 58
170 | ],
171 | "shape": 3,
172 | "slot_index": 0
173 | }
174 | ],
175 | "properties": {
176 | "Node name for S&R": "String"
177 | },
178 | "widgets_values": [
179 | "Yes"
180 | ]
181 | },
182 | {
183 | "id": 48,
184 | "type": "MaskPreview+",
185 | "pos": [
186 | 756,
187 | 302
188 | ],
189 | "size": {
190 | "0": 210,
191 | "1": 246
192 | },
193 | "flags": {},
194 | "order": 3,
195 | "mode": 0,
196 | "inputs": [
197 | {
198 | "name": "mask",
199 | "type": "MASK",
200 | "link": 69
201 | }
202 | ],
203 | "properties": {
204 | "Node name for S&R": "MaskPreview+"
205 | }
206 | },
207 | {
208 | "id": 39,
209 | "type": "Compare",
210 | "pos": [
211 | 757,
212 | 1079
213 | ],
214 | "size": {
215 | "0": 315,
216 | "1": 78
217 | },
218 | "flags": {},
219 | "order": 10,
220 | "mode": 0,
221 | "inputs": [
222 | {
223 | "name": "a",
224 | "type": "*",
225 | "link": 59
226 | },
227 | {
228 | "name": "b",
229 | "type": "*",
230 | "link": 58
231 | }
232 | ],
233 | "outputs": [
234 | {
235 | "name": "B",
236 | "type": "BOOLEAN",
237 | "links": [
238 | 78
239 | ],
240 | "shape": 3,
241 | "slot_index": 0
242 | }
243 | ],
244 | "properties": {
245 | "Node name for S&R": "Compare"
246 | },
247 | "widgets_values": [
248 | "a == b"
249 | ]
250 | },
251 | {
252 | "id": 32,
253 | "type": "ImageToMask",
254 | "pos": [
255 | -296,
256 | 518
257 | ],
258 | "size": {
259 | "0": 315,
260 | "1": 58
261 | },
262 | "flags": {},
263 | "order": 7,
264 | "mode": 0,
265 | "inputs": [
266 | {
267 | "name": "image",
268 | "type": "IMAGE",
269 | "link": 49
270 | }
271 | ],
272 | "outputs": [
273 | {
274 | "name": "MASK",
275 | "type": "MASK",
276 | "links": [
277 | 50,
278 | 79
279 | ],
280 | "shape": 3,
281 | "slot_index": 0
282 | }
283 | ],
284 | "properties": {
285 | "Node name for S&R": "ImageToMask"
286 | },
287 | "widgets_values": [
288 | "green"
289 | ]
290 | },
291 | {
292 | "id": 51,
293 | "type": "If ANY execute A else B",
294 | "pos": [
295 | 947,
296 | 648
297 | ],
298 | "size": {
299 | "0": 210,
300 | "1": 66
301 | },
302 | "flags": {},
303 | "order": 11,
304 | "mode": 0,
305 | "inputs": [
306 | {
307 | "name": "ANY",
308 | "type": "*",
309 | "link": 78
310 | },
311 | {
312 | "name": "IF_TRUE",
313 | "type": "*",
314 | "link": 79
315 | },
316 | {
317 | "name": "IF_FALSE",
318 | "type": "*",
319 | "link": 80
320 | }
321 | ],
322 | "outputs": [
323 | {
324 | "name": "?",
325 | "type": "*",
326 | "links": [
327 | 81
328 | ],
329 | "shape": 3,
330 | "slot_index": 0
331 | }
332 | ],
333 | "properties": {
334 | "Node name for S&R": "If ANY execute A else B"
335 | }
336 | },
337 | {
338 | "id": 24,
339 | "type": "LayerUtility: LaMa",
340 | "pos": [
341 | 1384,
342 | 959
343 | ],
344 | "size": {
345 | "0": 315,
346 | "1": 174
347 | },
348 | "flags": {},
349 | "order": 12,
350 | "mode": 0,
351 | "inputs": [
352 | {
353 | "name": "image",
354 | "type": "IMAGE",
355 | "link": 83
356 | },
357 | {
358 | "name": "mask",
359 | "type": "MASK",
360 | "link": 81,
361 | "slot_index": 1
362 | }
363 | ],
364 | "outputs": [
365 | {
366 | "name": "image",
367 | "type": "IMAGE",
368 | "links": [
369 | 47
370 | ],
371 | "shape": 3,
372 | "slot_index": 0
373 | }
374 | ],
375 | "properties": {
376 | "Node name for S&R": "LayerUtility: LaMa"
377 | },
378 | "widgets_values": [
379 | "lama",
380 | "cuda",
381 | false,
382 | 25,
383 | 6
384 | ]
385 | },
386 | {
387 | "id": 47,
388 | "type": "SolidMask",
389 | "pos": [
390 | -312,
391 | 227
392 | ],
393 | "size": {
394 | "0": 315,
395 | "1": 106
396 | },
397 | "flags": {},
398 | "order": 1,
399 | "mode": 0,
400 | "outputs": [
401 | {
402 | "name": "MASK",
403 | "type": "MASK",
404 | "links": [
405 | 69,
406 | 80
407 | ],
408 | "shape": 3,
409 | "slot_index": 0
410 | }
411 | ],
412 | "properties": {
413 | "Node name for S&R": "SolidMask"
414 | },
415 | "widgets_values": [
416 | 0,
417 | 1000,
418 | 662
419 | ]
420 | },
421 | {
422 | "id": 33,
423 | "type": "MaskPreview+",
424 | "pos": [
425 | 511,
426 | 394
427 | ],
428 | "size": {
429 | "0": 210,
430 | "1": 246
431 | },
432 | "flags": {},
433 | "order": 9,
434 | "mode": 0,
435 | "inputs": [
436 | {
437 | "name": "mask",
438 | "type": "MASK",
439 | "link": 50
440 | }
441 | ],
442 | "properties": {
443 | "Node name for S&R": "MaskPreview+"
444 | }
445 | },
446 | {
447 | "id": 17,
448 | "type": "Prompt Template",
449 | "pos": [
450 | -400,
451 | 648
452 | ],
453 | "size": {
454 | "0": 521.4636840820312,
455 | "1": 208.76742553710938
456 | },
457 | "flags": {},
458 | "order": 6,
459 | "mode": 0,
460 | "inputs": [
461 | {
462 | "name": "prompt",
463 | "type": "STRING",
464 | "link": 38,
465 | "widget": {
466 | "name": "prompt"
467 | }
468 | }
469 | ],
470 | "outputs": [
471 | {
472 | "name": "STRING",
473 | "type": "STRING",
474 | "links": [
475 | 30
476 | ],
477 | "shape": 3,
478 | "slot_index": 0
479 | }
480 | ],
481 | "properties": {
482 | "Node name for S&R": "Prompt Template"
483 | },
484 | "widgets_values": [
485 | "",
486 | "判断如下文本中是否包含暴力,恐怖,色情,政治的内容\n[prompt]\n如果包含,返回\"Yes\", 否则返回\"No\"\n直接返回结果,不要包含其他任务内容"
487 | ]
488 | },
489 | {
490 | "id": 4,
491 | "type": "LoadImage",
492 | "pos": [
493 | -1091,
494 | 346
495 | ],
496 | "size": {
497 | "0": 315,
498 | "1": 314
499 | },
500 | "flags": {},
501 | "order": 2,
502 | "mode": 0,
503 | "outputs": [
504 | {
505 | "name": "IMAGE",
506 | "type": "IMAGE",
507 | "links": [
508 | 36,
509 | 82
510 | ],
511 | "shape": 3,
512 | "slot_index": 0
513 | },
514 | {
515 | "name": "MASK",
516 | "type": "MASK",
517 | "links": [],
518 | "shape": 3,
519 | "slot_index": 1
520 | }
521 | ],
522 | "properties": {
523 | "Node name for S&R": "LoadImage"
524 | },
525 | "widgets_values": [
526 | "textract_demo005 (1).png",
527 | "image"
528 | ]
529 | },
530 | {
531 | "id": 52,
532 | "type": "Reroute",
533 | "pos": [
534 | 726.544712798572,
535 | 819.4428242873535
536 | ],
537 | "size": [
538 | 75,
539 | 26
540 | ],
541 | "flags": {},
542 | "order": 5,
543 | "mode": 0,
544 | "inputs": [
545 | {
546 | "name": "",
547 | "type": "*",
548 | "link": 82
549 | }
550 | ],
551 | "outputs": [
552 | {
553 | "name": "",
554 | "type": "IMAGE",
555 | "links": [
556 | 83
557 | ],
558 | "slot_index": 0
559 | }
560 | ],
561 | "properties": {
562 | "showOutputText": false,
563 | "horizontal": false
564 | }
565 | }
566 | ],
567 | "links": [
568 | [
569 | 30,
570 | 17,
571 | 0,
572 | 15,
573 | 0,
574 | "STRING"
575 | ],
576 | [
577 | 36,
578 | 4,
579 | 0,
580 | 23,
581 | 0,
582 | "IMAGE"
583 | ],
584 | [
585 | 38,
586 | 23,
587 | 0,
588 | 17,
589 | 0,
590 | "STRING"
591 | ],
592 | [
593 | 47,
594 | 24,
595 | 0,
596 | 25,
597 | 0,
598 | "IMAGE"
599 | ],
600 | [
601 | 49,
602 | 23,
603 | 5,
604 | 32,
605 | 0,
606 | "IMAGE"
607 | ],
608 | [
609 | 50,
610 | 32,
611 | 0,
612 | 33,
613 | 0,
614 | "MASK"
615 | ],
616 | [
617 | 58,
618 | 40,
619 | 0,
620 | 39,
621 | 1,
622 | "*"
623 | ],
624 | [
625 | 59,
626 | 15,
627 | 0,
628 | 39,
629 | 0,
630 | "*"
631 | ],
632 | [
633 | 69,
634 | 47,
635 | 0,
636 | 48,
637 | 0,
638 | "MASK"
639 | ],
640 | [
641 | 78,
642 | 39,
643 | 0,
644 | 51,
645 | 0,
646 | "*"
647 | ],
648 | [
649 | 79,
650 | 32,
651 | 0,
652 | 51,
653 | 1,
654 | "*"
655 | ],
656 | [
657 | 80,
658 | 47,
659 | 0,
660 | 51,
661 | 2,
662 | "*"
663 | ],
664 | [
665 | 81,
666 | 51,
667 | 0,
668 | 24,
669 | 1,
670 | "MASK"
671 | ],
672 | [
673 | 82,
674 | 4,
675 | 0,
676 | 52,
677 | 0,
678 | "*"
679 | ],
680 | [
681 | 83,
682 | 52,
683 | 0,
684 | 24,
685 | 0,
686 | "IMAGE"
687 | ]
688 | ],
689 | "groups": [],
690 | "config": {},
691 | "extra": {
692 | "ds": {
693 | "scale": 0.7513148009015782,
694 | "offset": {
695 | "0": -256.5145409235732,
696 | "1": -365.5770235061039
697 | }
698 | }
699 | },
700 | "version": 0.4
701 | }
--------------------------------------------------------------------------------
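The moderation gate in this workflow: Textract OCR text is wrapped in a Prompt Template whose Chinese instruction asks Claude whether the text contains violent, terrorist, pornographic, or political content and to return a bare "Yes" or "No"; a Compare node tests the reply against the constant "Yes", and If ANY execute A else B then routes either the OCR-derived mask or an all-zero SolidMask into LaMa, so erasure only happens for flagged text. A plain-Python sketch of that branch (ocr_mask and empty_mask stand in for the ImageToMask and SolidMask outputs):

def pick_mask(claude_reply, ocr_mask, empty_mask):
    # Compare node: a == b, with b fixed to the string "Yes".
    flagged = claude_reply.strip() == "Yes"
    # "If ANY execute A else B": erase with the OCR mask only when flagged.
    return ocr_mask if flagged else empty_mask

print(pick_mask("Yes", "ocr mask", "blank mask"))  # ocr mask
print(pick_mask("No", "ocr mask", "blank mask"))   # blank mask
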
/workflows/subtitles_translate.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 41,
3 | "last_link_id": 58,
4 | "nodes": [
5 | {
6 | "id": 11,
7 | "type": "LayerStyle: DropShadow",
8 | "pos": [
9 | 1364,
10 | 274
11 | ],
12 | "size": {
13 | "0": 315,
14 | "1": 266
15 | },
16 | "flags": {},
17 | "order": 11,
18 | "mode": 0,
19 | "inputs": [
20 | {
21 | "name": "background_image",
22 | "type": "IMAGE",
23 | "link": 46
24 | },
25 | {
26 | "name": "layer_image",
27 | "type": "IMAGE",
28 | "link": 28
29 | },
30 | {
31 | "name": "layer_mask",
32 | "type": "MASK",
33 | "link": null
34 | }
35 | ],
36 | "outputs": [
37 | {
38 | "name": "image",
39 | "type": "IMAGE",
40 | "links": [
41 | 20
42 | ],
43 | "shape": 3,
44 | "slot_index": 0
45 | }
46 | ],
47 | "properties": {
48 | "Node name for S&R": "LayerStyle: DropShadow"
49 | },
50 | "widgets_values": [
51 | true,
52 | "normal",
53 | 75,
54 | 8,
55 | 8,
56 | 2,
57 | 8,
58 | "#000000"
59 | ]
60 | },
61 | {
62 | "id": 24,
63 | "type": "LayerUtility: LaMa",
64 | "pos": [
65 | 833,
66 | 102
67 | ],
68 | "size": {
69 | "0": 315,
70 | "1": 174
71 | },
72 | "flags": {},
73 | "order": 8,
74 | "mode": 0,
75 | "inputs": [
76 | {
77 | "name": "image",
78 | "type": "IMAGE",
79 | "link": 44
80 | },
81 | {
82 | "name": "mask",
83 | "type": "MASK",
84 | "link": 56
85 | }
86 | ],
87 | "outputs": [
88 | {
89 | "name": "image",
90 | "type": "IMAGE",
91 | "links": [
92 | 45,
93 | 46,
94 | 47
95 | ],
96 | "shape": 3,
97 | "slot_index": 0
98 | }
99 | ],
100 | "properties": {
101 | "Node name for S&R": "LayerUtility: LaMa"
102 | },
103 | "widgets_values": [
104 | "lama",
105 | "cuda",
106 | false,
107 | 25,
108 | 6
109 | ]
110 | },
111 | {
112 | "id": 32,
113 | "type": "ImageToMask",
114 | "pos": [
115 | -25.47701164649243,
116 | 402.810252108054
117 | ],
118 | "size": {
119 | "0": 315,
120 | "1": 58
121 | },
122 | "flags": {},
123 | "order": 4,
124 | "mode": 0,
125 | "inputs": [
126 | {
127 | "name": "image",
128 | "type": "IMAGE",
129 | "link": 49
130 | }
131 | ],
132 | "outputs": [
133 | {
134 | "name": "MASK",
135 | "type": "MASK",
136 | "links": [
137 | 50,
138 | 56
139 | ],
140 | "shape": 3,
141 | "slot_index": 0
142 | }
143 | ],
144 | "properties": {
145 | "Node name for S&R": "ImageToMask"
146 | },
147 | "widgets_values": [
148 | "green"
149 | ]
150 | },
151 | {
152 | "id": 33,
153 | "type": "MaskPreview+",
154 | "pos": [
155 | 601,
156 | 267
157 | ],
158 | "size": {
159 | "0": 210,
160 | "1": 246
161 | },
162 | "flags": {},
163 | "order": 7,
164 | "mode": 0,
165 | "inputs": [
166 | {
167 | "name": "mask",
168 | "type": "MASK",
169 | "link": 50
170 | }
171 | ],
172 | "properties": {
173 | "Node name for S&R": "MaskPreview+"
174 | }
175 | },
176 | {
177 | "id": 25,
178 | "type": "PreviewImage",
179 | "pos": [
180 | 1452,
181 | -35
182 | ],
183 | "size": {
184 | "0": 210,
185 | "1": 246
186 | },
187 | "flags": {},
188 | "order": 10,
189 | "mode": 0,
190 | "inputs": [
191 | {
192 | "name": "images",
193 | "type": "IMAGE",
194 | "link": 47
195 | }
196 | ],
197 | "properties": {
198 | "Node name for S&R": "PreviewImage"
199 | }
200 | },
201 | {
202 | "id": 2,
203 | "type": "PreviewImage",
204 | "pos": [
205 | 1904,
206 | 273
207 | ],
208 | "size": {
209 | "0": 714.4351806640625,
210 | "1": 444.59918212890625
211 | },
212 | "flags": {},
213 | "order": 12,
214 | "mode": 0,
215 | "inputs": [
216 | {
217 | "name": "images",
218 | "type": "IMAGE",
219 | "link": 20
220 | }
221 | ],
222 | "properties": {
223 | "Node name for S&R": "PreviewImage"
224 | }
225 | },
226 | {
227 | "id": 23,
228 | "type": "Image OCR By Textract",
229 | "pos": [
230 | -654,
231 | 420
232 | ],
233 | "size": {
234 | "0": 210,
235 | "1": 126
236 | },
237 | "flags": {},
238 | "order": 2,
239 | "mode": 0,
240 | "inputs": [
241 | {
242 | "name": "image",
243 | "type": "IMAGE",
244 | "link": 36
245 | }
246 | ],
247 | "outputs": [
248 | {
249 | "name": "Text",
250 | "type": "STRING",
251 | "links": [
252 | 38
253 | ],
254 | "shape": 3,
255 | "slot_index": 0
256 | },
257 | {
258 | "name": "Left",
259 | "type": "INT",
260 | "links": [
261 | 39
262 | ],
263 | "shape": 3,
264 | "slot_index": 1
265 | },
266 | {
267 | "name": "Top",
268 | "type": "INT",
269 | "links": [
270 | 40
271 | ],
272 | "shape": 3,
273 | "slot_index": 2
274 | },
275 | {
276 | "name": "Width",
277 | "type": "INT",
278 | "links": [
279 | 41
280 | ],
281 | "shape": 3,
282 | "slot_index": 3
283 | },
284 | {
285 | "name": "Height",
286 | "type": "INT",
287 | "links": [
288 | 42
289 | ],
290 | "shape": 3,
291 | "slot_index": 4
292 | },
293 | {
294 | "name": "Mask Image",
295 | "type": "IMAGE",
296 | "links": [
297 | 49
298 | ],
299 | "shape": 3,
300 | "slot_index": 5
301 | }
302 | ],
303 | "properties": {
304 | "Node name for S&R": "Image OCR By Textract"
305 | }
306 | },
307 | {
308 | "id": 15,
309 | "type": "Bedrock - Claude",
310 | "pos": [
311 | 305,
312 | 811
313 | ],
314 | "size": {
315 | "0": 400,
316 | "1": 200
317 | },
318 | "flags": {},
319 | "order": 5,
320 | "mode": 0,
321 | "inputs": [
322 | {
323 | "name": "prompt",
324 | "type": "STRING",
325 | "link": 30,
326 | "widget": {
327 | "name": "prompt"
328 | }
329 | }
330 | ],
331 | "outputs": [
332 | {
333 | "name": "STRING",
334 | "type": "STRING",
335 | "links": [
336 | 29
337 | ],
338 | "shape": 3,
339 | "slot_index": 0
340 | }
341 | ],
342 | "properties": {
343 | "Node name for S&R": "Bedrock - Claude"
344 | },
345 | "widgets_values": [
346 | "",
347 | "anthropic.claude-3-haiku-20240307-v1:0",
348 | 2048,
349 | 1,
350 | 1,
351 | 71
352 | ]
353 | },
354 | {
355 | "id": 9,
356 | "type": "LayerUtility: SimpleTextImage",
357 | "pos": [
358 | 1316,
359 | 619
360 | ],
361 | "size": {
362 | "0": 392.5400695800781,
363 | "1": 406.2833557128906
364 | },
365 | "flags": {},
366 | "order": 9,
367 | "mode": 0,
368 | "inputs": [
369 | {
370 | "name": "size_as",
371 | "type": "*",
372 | "link": 45
373 | },
374 | {
375 | "name": "text",
376 | "type": "STRING",
377 | "link": 29,
378 | "widget": {
379 | "name": "text"
380 | }
381 | },
382 | {
383 | "name": "x_offset",
384 | "type": "INT",
385 | "link": 39,
386 | "widget": {
387 | "name": "x_offset"
388 | }
389 | },
390 | {
391 | "name": "y_offset",
392 | "type": "INT",
393 | "link": 40,
394 | "widget": {
395 | "name": "y_offset"
396 | }
397 | },
398 | {
399 | "name": "width",
400 | "type": "INT",
401 | "link": 41,
402 | "widget": {
403 | "name": "width"
404 | }
405 | },
406 | {
407 | "name": "height",
408 | "type": "INT",
409 | "link": 42,
410 | "widget": {
411 | "name": "height"
412 | }
413 | }
414 | ],
415 | "outputs": [
416 | {
417 | "name": "image",
418 | "type": "IMAGE",
419 | "links": [
420 | 28
421 | ],
422 | "shape": 3,
423 | "slot_index": 0
424 | },
425 | {
426 | "name": "mask",
427 | "type": "MASK",
428 | "links": null,
429 | "shape": 3
430 | }
431 | ],
432 | "properties": {
433 | "Node name for S&R": "LayerUtility: SimpleTextImage"
434 | },
435 | "widgets_values": [
436 | "Are you going to Scarborough Fair?\nParsley, sage, rosemary, and thyme.\nRemember me to one who lives there,\nShe once was the true love of mine.",
437 | "Alibaba-PuHuiTi-Heavy.ttf",
438 | "left",
439 | 80,
440 | 8,
441 | 50,
442 | "#FFFFFF",
443 | 0,
444 | "#FF8000",
445 | 0,
446 | 300,
447 | 512,
448 | 512
449 | ]
450 | },
451 | {
452 | "id": 40,
453 | "type": "String",
454 | "pos": [
455 | -99.1735806485697,
456 | 1134.5376124549446
457 | ],
458 | "size": {
459 | "0": 315,
460 | "1": 58
461 | },
462 | "flags": {},
463 | "order": 0,
464 | "mode": 0,
465 | "outputs": [
466 | {
467 | "name": "STRING",
468 | "type": "STRING",
469 | "links": [
470 | 58
471 | ],
472 | "shape": 3,
473 | "slot_index": 0
474 | }
475 | ],
476 | "properties": {
477 | "Node name for S&R": "String"
478 | },
479 | "widgets_values": [
480 | "浪人"
481 | ]
482 | },
483 | {
484 | "id": 39,
485 | "type": "Compare",
486 | "pos": [
487 | 306,
488 | 1091
489 | ],
490 | "size": {
491 | "0": 315,
492 | "1": 78
493 | },
494 | "flags": {},
495 | "order": 6,
496 | "mode": 0,
497 | "inputs": [
498 | {
499 | "name": "a",
500 | "type": "*",
501 | "link": 57
502 | },
503 | {
504 | "name": "b",
505 | "type": "*",
506 | "link": 58
507 | }
508 | ],
509 | "outputs": [
510 | {
511 | "name": "B",
512 | "type": "BOOLEAN",
513 | "links": null,
514 | "shape": 3,
515 | "slot_index": 0
516 | }
517 | ],
518 | "properties": {
519 | "Node name for S&R": "Compare"
520 | },
521 | "widgets_values": [
522 | "a == b"
523 | ]
524 | },
525 | {
526 | "id": 4,
527 | "type": "LoadImage",
528 | "pos": [
529 | -1091,
530 | 346
531 | ],
532 | "size": {
533 | "0": 315,
534 | "1": 314
535 | },
536 | "flags": {},
537 | "order": 1,
538 | "mode": 0,
539 | "outputs": [
540 | {
541 | "name": "IMAGE",
542 | "type": "IMAGE",
543 | "links": [
544 | 36,
545 | 44
546 | ],
547 | "shape": 3,
548 | "slot_index": 0
549 | },
550 | {
551 | "name": "MASK",
552 | "type": "MASK",
553 | "links": [],
554 | "shape": 3,
555 | "slot_index": 1
556 | }
557 | ],
558 | "properties": {
559 | "Node name for S&R": "LoadImage"
560 | },
561 | "widgets_values": [
562 | "textract_demo005.png",
563 | "image"
564 | ]
565 | },
566 | {
567 | "id": 17,
568 | "type": "Prompt Template",
569 | "pos": [
570 | -398,
571 | 661
572 | ],
573 | "size": {
574 | "0": 521.4636840820312,
575 | "1": 208.76742553710938
576 | },
577 | "flags": {},
578 | "order": 3,
579 | "mode": 0,
580 | "inputs": [
581 | {
582 | "name": "prompt",
583 | "type": "STRING",
584 | "link": 38,
585 | "widget": {
586 | "name": "prompt"
587 | }
588 | }
589 | ],
590 | "outputs": [
591 | {
592 | "name": "STRING",
593 | "type": "STRING",
594 | "links": [
595 | 30,
596 | 57
597 | ],
598 | "shape": 3,
599 | "slot_index": 0
600 | }
601 | ],
602 | "properties": {
603 | "Node name for S&R": "Prompt Template"
604 | },
605 | "widgets_values": [
606 | "",
607 | "翻译如下英文为日语\n[prompt]\n直接返回翻译后文本"
608 | ]
609 | }
610 | ],
611 | "links": [
612 | [
613 | 20,
614 | 11,
615 | 0,
616 | 2,
617 | 0,
618 | "IMAGE"
619 | ],
620 | [
621 | 28,
622 | 9,
623 | 0,
624 | 11,
625 | 1,
626 | "IMAGE"
627 | ],
628 | [
629 | 29,
630 | 15,
631 | 0,
632 | 9,
633 | 1,
634 | "STRING"
635 | ],
636 | [
637 | 30,
638 | 17,
639 | 0,
640 | 15,
641 | 0,
642 | "STRING"
643 | ],
644 | [
645 | 36,
646 | 4,
647 | 0,
648 | 23,
649 | 0,
650 | "IMAGE"
651 | ],
652 | [
653 | 38,
654 | 23,
655 | 0,
656 | 17,
657 | 0,
658 | "STRING"
659 | ],
660 | [
661 | 39,
662 | 23,
663 | 1,
664 | 9,
665 | 2,
666 | "INT"
667 | ],
668 | [
669 | 40,
670 | 23,
671 | 2,
672 | 9,
673 | 3,
674 | "INT"
675 | ],
676 | [
677 | 41,
678 | 23,
679 | 3,
680 | 9,
681 | 4,
682 | "INT"
683 | ],
684 | [
685 | 42,
686 | 23,
687 | 4,
688 | 9,
689 | 5,
690 | "INT"
691 | ],
692 | [
693 | 44,
694 | 4,
695 | 0,
696 | 24,
697 | 0,
698 | "IMAGE"
699 | ],
700 | [
701 | 45,
702 | 24,
703 | 0,
704 | 9,
705 | 0,
706 | "*"
707 | ],
708 | [
709 | 46,
710 | 24,
711 | 0,
712 | 11,
713 | 0,
714 | "IMAGE"
715 | ],
716 | [
717 | 47,
718 | 24,
719 | 0,
720 | 25,
721 | 0,
722 | "IMAGE"
723 | ],
724 | [
725 | 49,
726 | 23,
727 | 5,
728 | 32,
729 | 0,
730 | "IMAGE"
731 | ],
732 | [
733 | 50,
734 | 32,
735 | 0,
736 | 33,
737 | 0,
738 | "MASK"
739 | ],
740 | [
741 | 56,
742 | 32,
743 | 0,
744 | 24,
745 | 1,
746 | "MASK"
747 | ],
748 | [
749 | 57,
750 | 17,
751 | 0,
752 | 39,
753 | 0,
754 | "*"
755 | ],
756 | [
757 | 58,
758 | 40,
759 | 0,
760 | 39,
761 | 1,
762 | "*"
763 | ]
764 | ],
765 | "groups": [],
766 | "config": {},
767 | "extra": {
768 | "ds": {
769 | "scale": 1.1000000000000005,
770 | "offset": {
771 | "0": 986.1125011031153,
772 | "1": -372.55607836403607
773 | }
774 | }
775 | },
776 | "version": 0.4
777 | }
--------------------------------------------------------------------------------
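Here the Prompt Template's Chinese instruction asks Claude to translate the OCR'd English into Japanese and return only the translated text, which is then re-rendered over the LaMa-cleaned frame. For reference, a minimal sketch of the kind of InvokeModel call a "Bedrock - Claude" node wraps (the node's actual implementation lives in nodes/bedrock.py; this assumes boto3 with credentials and Bedrock model access in the chosen region):

import json
import boto3

client = boto3.client("bedrock-runtime", region_name="us-east-1")

body = {
    "anthropic_version": "bedrock-2023-05-31",
    "max_tokens": 2048,
    "temperature": 1,
    "messages": [
        {
            "role": "user",
            "content": "Translate the following English into Japanese:\n"
                       "Are you going to Scarborough Fair?\n"
                       "Return only the translated text.",
        }
    ],
}
response = client.invoke_model(
    modelId="anthropic.claude-3-haiku-20240307-v1:0",
    body=json.dumps(body),
)
print(json.loads(response["body"].read())["content"][0]["text"])
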
/workflows/text2img_with_prompt_refinement.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 51,
3 | "last_link_id": 62,
4 | "nodes": [
5 | {
6 | "id": 38,
7 | "type": "PreviewImage",
8 | "pos": {
9 | "0": 1254,
10 | "1": 1215
11 | },
12 | "size": {
13 | "0": 326.11907958984375,
14 | "1": 246.51878356933594
15 | },
16 | "flags": {},
17 | "order": 4,
18 | "mode": 0,
19 | "inputs": [
20 | {
21 | "name": "images",
22 | "type": "IMAGE",
23 | "link": 52
24 | }
25 | ],
26 | "outputs": [],
27 | "properties": {
28 | "Node name for S&R": "PreviewImage"
29 | },
30 | "widgets_values": []
31 | },
32 | {
33 | "id": 37,
34 | "type": "Bedrock - SDXL",
35 | "pos": {
36 | "0": 818.9974975585938,
37 | "1": 1207.5206298828125
38 | },
39 | "size": {
40 | "0": 400,
41 | "1": 268
42 | },
43 | "flags": {},
44 | "order": 2,
45 | "mode": 0,
46 | "inputs": [
47 | {
48 | "name": "prompt",
49 | "type": "STRING",
50 | "link": 51,
51 | "widget": {
52 | "name": "prompt"
53 | }
54 | }
55 | ],
56 | "outputs": [
57 | {
58 | "name": "IMAGE",
59 | "type": "IMAGE",
60 | "links": [
61 | 52
62 | ],
63 | "slot_index": 0,
64 | "shape": 3
65 | }
66 | ],
67 | "properties": {
68 | "Node name for S&R": "Bedrock - SDXL"
69 | },
70 | "widgets_values": [
71 | "",
72 | "1024 x 1024",
73 | "None",
74 | 6.985,
75 | 30,
76 | "NONE",
77 | "Auto",
78 | 4126975093,
79 | "randomize"
80 | ]
81 | },
82 | {
83 | "id": 29,
84 | "type": "Bedrock - SDXL",
85 | "pos": {
86 | "0": 1263.5296630859375,
87 | "1": 370.2948303222656
88 | },
89 | "size": {
90 | "0": 400,
91 | "1": 268
92 | },
93 | "flags": {},
94 | "order": 6,
95 | "mode": 0,
96 | "inputs": [
97 | {
98 | "name": "prompt",
99 | "type": "STRING",
100 | "link": 47,
101 | "widget": {
102 | "name": "prompt"
103 | }
104 | }
105 | ],
106 | "outputs": [
107 | {
108 | "name": "IMAGE",
109 | "type": "IMAGE",
110 | "links": [
111 | 33
112 | ],
113 | "slot_index": 0,
114 | "shape": 3
115 | }
116 | ],
117 | "properties": {
118 | "Node name for S&R": "Bedrock - SDXL"
119 | },
120 | "widgets_values": [
121 | "",
122 | "1024 x 1024",
123 | "cinematic",
124 | 6.978,
125 | 30,
126 | "NONE",
127 | "Auto",
128 | 4144356433,
129 | "randomize"
130 | ]
131 | },
132 | {
133 | "id": 8,
134 | "type": "ShowText|pysssss",
135 | "pos": {
136 | "0": 824,
137 | "1": 904
138 | },
139 | "size": {
140 | "0": 367.124755859375,
141 | "1": 159.00286865234375
142 | },
143 | "flags": {
144 | "collapsed": false
145 | },
146 | "order": 5,
147 | "mode": 0,
148 | "inputs": [
149 | {
150 | "name": "text",
151 | "type": "STRING",
152 | "link": 46,
153 | "widget": {
154 | "name": "text"
155 | }
156 | }
157 | ],
158 | "outputs": [
159 | {
160 | "name": "STRING",
161 | "type": "STRING",
162 | "links": null,
163 | "slot_index": 0,
164 | "shape": 6
165 | }
166 | ],
167 | "properties": {
168 | "Node name for S&R": "ShowText|pysssss"
169 | },
170 | "widgets_values": [
171 | "",
172 | "A small, fluffy puppy with big, curious eyes sits in a lush, green meadow, surrounded by vibrant wildflowers and a clear, azure sky, creating a serene and picturesque scene that evokes a sense of warmth, wonder, and tranquility."
173 | ]
174 | },
175 | {
176 | "id": 27,
177 | "type": "PreviewImage",
178 | "pos": {
179 | "0": 1268,
180 | "1": 700
181 | },
182 | "size": {
183 | "0": 392.57623291015625,
184 | "1": 390.4400329589844
185 | },
186 | "flags": {},
187 | "order": 9,
188 | "mode": 0,
189 | "inputs": [
190 | {
191 | "name": "images",
192 | "type": "IMAGE",
193 | "link": 33
194 | }
195 | ],
196 | "outputs": [],
197 | "properties": {
198 | "Node name for S&R": "PreviewImage"
199 | },
200 | "widgets_values": []
201 | },
202 | {
203 | "id": 36,
204 | "type": "Bedrock - Titan Text to Image",
205 | "pos": {
206 | "0": 1719,
207 | "1": 379
208 | },
209 | "size": {
210 | "0": 400,
211 | "1": 220
212 | },
213 | "flags": {},
214 | "order": 7,
215 | "mode": 0,
216 | "inputs": [
217 | {
218 | "name": "prompt",
219 | "type": "STRING",
220 | "link": 49,
221 | "widget": {
222 | "name": "prompt"
223 | }
224 | }
225 | ],
226 | "outputs": [
227 | {
228 | "name": "IMAGE",
229 | "type": "IMAGE",
230 | "links": [
231 | 50
232 | ],
233 | "slot_index": 0,
234 | "shape": 3
235 | }
236 | ],
237 | "properties": {
238 | "Node name for S&R": "Bedrock - Titan Text to Image"
239 | },
240 | "widgets_values": [
241 | "",
242 | 1,
243 | "standard",
244 | "1024 x 1024",
245 | 7,
246 | 1518502167,
247 | "randomize"
248 | ]
249 | },
250 | {
251 | "id": 33,
252 | "type": "PreviewImage",
253 | "pos": {
254 | "0": 1720,
255 | "1": 651
256 | },
257 | "size": {
258 | "0": 390.7298889160156,
259 | "1": 441.3887634277344
260 | },
261 | "flags": {},
262 | "order": 10,
263 | "mode": 0,
264 | "inputs": [
265 | {
266 | "name": "images",
267 | "type": "IMAGE",
268 | "link": 50
269 | }
270 | ],
271 | "outputs": [],
272 | "properties": {
273 | "Node name for S&R": "PreviewImage"
274 | },
275 | "widgets_values": []
276 | },
277 | {
278 | "id": 51,
279 | "type": "Bedrock - Amazon Nova Canvas Text to Image",
280 | "pos": {
281 | "0": 2168.3203125,
282 | "1": 379.58001708984375
283 | },
284 | "size": {
285 | "0": 487.26812744140625,
286 | "1": 239.79922485351562
287 | },
288 | "flags": {},
289 | "order": 8,
290 | "mode": 0,
291 | "inputs": [
292 | {
293 | "name": "prompt",
294 | "type": "STRING",
295 | "link": 61,
296 | "widget": {
297 | "name": "prompt"
298 | }
299 | }
300 | ],
301 | "outputs": [
302 | {
303 | "name": "IMAGE",
304 | "type": "IMAGE",
305 | "links": [
306 | 62
307 | ]
308 | }
309 | ],
310 | "properties": {
311 | "Node name for S&R": "Bedrock - Amazon Nova Canvas Text to Image"
312 | },
313 | "widgets_values": [
314 | "",
315 | 2,
316 | "1280 x 720",
317 | 7.4,
318 | 767134676,
319 | "randomize",
320 | "premium",
321 | ""
322 | ]
323 | },
324 | {
325 | "id": 43,
326 | "type": "PreviewImage",
327 | "pos": {
328 | "0": 2164.3203125,
329 | "1": 675.5799560546875
330 | },
331 | "size": {
332 | "0": 497.06298828125,
333 | "1": 405.6451721191406
334 | },
335 | "flags": {},
336 | "order": 11,
337 | "mode": 0,
338 | "inputs": [
339 | {
340 | "name": "images",
341 | "type": "IMAGE",
342 | "link": 62
343 | }
344 | ],
345 | "outputs": [],
346 | "properties": {
347 | "Node name for S&R": "PreviewImage"
348 | },
349 | "widgets_values": []
350 | },
351 | {
352 | "id": 34,
353 | "type": "Bedrock - Claude",
354 | "pos": {
355 | "0": 825,
356 | "1": 632
357 | },
358 | "size": {
359 | "0": 364.671875,
360 | "1": 196
361 | },
362 | "flags": {},
363 | "order": 3,
364 | "mode": 0,
365 | "inputs": [
366 | {
367 | "name": "prompt",
368 | "type": "STRING",
369 | "link": 45,
370 | "widget": {
371 | "name": "prompt"
372 | }
373 | }
374 | ],
375 | "outputs": [
376 | {
377 | "name": "STRING",
378 | "type": "STRING",
379 | "links": [
380 | 46,
381 | 47,
382 | 49,
383 | 61
384 | ],
385 | "slot_index": 0,
386 | "shape": 3
387 | }
388 | ],
389 | "properties": {
390 | "Node name for S&R": "Bedrock - Claude"
391 | },
392 | "widgets_values": [
393 | "",
394 | "anthropic.claude-3-haiku-20240307-v1:0",
395 | 200,
396 | 0.501,
397 | 1,
398 | 250
399 | ]
400 | },
401 | {
402 | "id": 5,
403 | "type": "Text Multiline",
404 | "pos": {
405 | "0": 354,
406 | "1": 330
407 | },
408 | "size": {
409 | "0": 379.65625,
410 | "1": 198.87109375
411 | },
412 | "flags": {},
413 | "order": 0,
414 | "mode": 0,
415 | "inputs": [],
416 | "outputs": [
417 | {
418 | "name": "STRING",
419 | "type": "STRING",
420 | "links": [
421 | 17,
422 | 51
423 | ],
424 | "slot_index": 0,
425 | "shape": 3
426 | }
427 | ],
428 | "title": "Original Prompt in Chinese",
429 | "properties": {
430 | "Node name for S&R": "Text Multiline"
431 | },
432 | "widgets_values": [
433 | "一只小狗"
434 | ]
435 | },
436 | {
437 | "id": 23,
438 | "type": "Prompt Template",
439 | "pos": {
440 | "0": 820.4378051757812,
441 | "1": 369.5005798339844
442 | },
443 | "size": {
444 | "0": 376.08880615234375,
445 | "1": 212.24671936035156
446 | },
447 | "flags": {},
448 | "order": 1,
449 | "mode": 0,
450 | "inputs": [
451 | {
452 | "name": "prompt",
453 | "type": "STRING",
454 | "link": 17,
455 | "widget": {
456 | "name": "prompt"
457 | }
458 | }
459 | ],
460 | "outputs": [
461 | {
462 | "name": "STRING",
463 | "type": "STRING",
464 | "links": [
465 | 45
466 | ],
467 | "slot_index": 0,
468 | "shape": 3
469 | }
470 | ],
471 | "properties": {
472 | "Node name for S&R": "Prompt Template"
473 | },
474 | "widgets_values": [
475 | "",
476 | "Describe a beautiful picture in detail using one sentence of \"[prompt]\", please answer in English words only, skip the preamble."
477 | ]
478 | }
479 | ],
480 | "links": [
481 | [
482 | 17,
483 | 5,
484 | 0,
485 | 23,
486 | 0,
487 | "STRING"
488 | ],
489 | [
490 | 33,
491 | 29,
492 | 0,
493 | 27,
494 | 0,
495 | "IMAGE"
496 | ],
497 | [
498 | 45,
499 | 23,
500 | 0,
501 | 34,
502 | 0,
503 | "STRING"
504 | ],
505 | [
506 | 46,
507 | 34,
508 | 0,
509 | 8,
510 | 0,
511 | "STRING"
512 | ],
513 | [
514 | 47,
515 | 34,
516 | 0,
517 | 29,
518 | 0,
519 | "STRING"
520 | ],
521 | [
522 | 49,
523 | 34,
524 | 0,
525 | 36,
526 | 0,
527 | "STRING"
528 | ],
529 | [
530 | 50,
531 | 36,
532 | 0,
533 | 33,
534 | 0,
535 | "IMAGE"
536 | ],
537 | [
538 | 51,
539 | 5,
540 | 0,
541 | 37,
542 | 0,
543 | "STRING"
544 | ],
545 | [
546 | 52,
547 | 37,
548 | 0,
549 | 38,
550 | 0,
551 | "IMAGE"
552 | ],
553 | [
554 | 61,
555 | 34,
556 | 0,
557 | 51,
558 | 0,
559 | "STRING"
560 | ],
561 | [
562 | 62,
563 | 51,
564 | 0,
565 | 43,
566 | 0,
567 | "IMAGE"
568 | ]
569 | ],
570 | "groups": [
571 | {
572 | "title": "Text to Image without Prompt Refinement",
573 | "bounding": [
574 | 809,
575 | 1133,
576 | 802,
577 | 355
578 | ],
579 | "color": "#3f789e",
580 | "font_size": 24,
581 | "flags": {}
582 | },
583 | {
584 | "title": "Text to Image (Bedrock SDXL)",
585 | "bounding": [
586 | 1257,
587 | 299,
588 | 417,
589 | 803
590 | ],
591 | "color": "#a1309b",
592 | "font_size": 24,
593 | "flags": {}
594 | },
595 | {
596 | "title": "Text to Image (Bedrock TitanImage)",
597 | "bounding": [
598 | 1707,
599 | 303,
600 | 415,
601 | 803
602 | ],
603 | "color": "#88A",
604 | "font_size": 24,
605 | "flags": {}
606 | },
607 | {
608 | "title": "Prompt Refinement (Bedrock Claude)",
609 | "bounding": [
610 | 810,
611 | 295,
612 | 411,
613 | 813
614 | ],
615 | "color": "#b58b2a",
616 | "font_size": 24,
617 | "flags": {}
618 | },
619 | {
620 | "title": "Text to Image (Bedrock Amazon Nova Canvas)",
621 | "bounding": [
622 | 2153,
623 | 304,
624 | 516,
625 | 803
626 | ],
627 | "color": "#8A8",
628 | "font_size": 24,
629 | "flags": {}
630 | }
631 | ],
632 | "config": {},
633 | "extra": {
634 | "ds": {
635 | "scale": 1,
636 | "offset": [
637 | 0,
638 | 0
639 | ]
640 | },
641 | "workspace_info": {
642 | "id": "c350a89c-0edd-4bef-9fc6-f83c239228a8"
643 | }
644 | },
645 | "version": 0.4
646 | }
--------------------------------------------------------------------------------
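The refinement pattern here: the Prompt Template node substitutes the Chinese input ("一只小狗", "a puppy") into a literal [prompt] placeholder, Claude expands it into one detailed English sentence, and that single refined string fans out (links 46, 47, 49, 61) to ShowText, Bedrock - SDXL, Bedrock - Titan Text to Image, and Bedrock - Amazon Nova Canvas Text to Image for a side-by-side comparison. A sketch of the substitution step, assuming a simple string replace on the placeholder:

# The Prompt Template contract as these workflows use it: a literal
# "[prompt]" placeholder replaced with the upstream string.
def fill_template(template, prompt):
    return template.replace("[prompt]", prompt)

template = ('Describe a beautiful picture in detail using one sentence of '
            '"[prompt]", please answer in English words only, skip the preamble.')
print(fill_template(template, "a puppy"))
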
/workflows/text2vid_luma_ray2.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 14,
3 | "last_link_id": 15,
4 | "nodes": [
5 | {
6 | "id": 9,
7 | "type": "PreviewImage",
8 | "pos": [
9 | 1080,
10 | 600
11 | ],
12 | "size": [
13 | 340,
14 | 246
15 | ],
16 | "flags": {},
17 | "order": 3,
18 | "mode": 0,
19 | "inputs": [
20 | {
21 | "name": "images",
22 | "type": "IMAGE",
23 | "link": 8
24 | }
25 | ],
26 | "outputs": [],
27 | "properties": {
28 | "Node name for S&R": "PreviewImage"
29 | },
30 | "widgets_values": []
31 | },
32 | {
33 | "id": 7,
34 | "type": "ShowText|pysssss",
35 | "pos": [
36 | 1080,
37 | 140
38 | ],
39 | "size": [
40 | 340,
41 | 100
42 | ],
43 | "flags": {},
44 | "order": 1,
45 | "mode": 0,
46 | "inputs": [
47 | {
48 | "name": "text",
49 | "type": "STRING",
50 | "link": 14,
51 | "widget": {
52 | "name": "text"
53 | }
54 | }
55 | ],
56 | "outputs": [
57 | {
58 | "name": "STRING",
59 | "type": "STRING",
60 | "links": [
61 | 12
62 | ],
63 | "slot_index": 0,
64 | "shape": 6
65 | }
66 | ],
67 | "properties": {
68 | "Node name for S&R": "ShowText|pysssss"
69 | },
70 | "widgets_values": [
71 | "",
72 | "/home/user/opt/ComfyUI/output/2025-02-16_14-32-30_0ch3x0afqras/0ch3x0afqras.mp4"
73 | ]
74 | },
75 | {
76 | "id": 11,
77 | "type": "VHS_VideoCombine",
78 | "pos": [
79 | 1460,
80 | 140
81 | ],
82 | "size": [
83 | 680,
84 | 695.25
85 | ],
86 | "flags": {},
87 | "order": 4,
88 | "mode": 0,
89 | "inputs": [
90 | {
91 | "name": "images",
92 | "type": "IMAGE",
93 | "link": 11
94 | },
95 | {
96 | "name": "audio",
97 | "type": "AUDIO",
98 | "link": null,
99 | "shape": 7
100 | },
101 | {
102 | "name": "meta_batch",
103 | "type": "VHS_BatchManager",
104 | "link": null,
105 | "shape": 7
106 | },
107 | {
108 | "name": "vae",
109 | "type": "VAE",
110 | "link": null,
111 | "shape": 7
112 | }
113 | ],
114 | "outputs": [
115 | {
116 | "name": "Filenames",
117 | "type": "VHS_FILENAMES",
118 | "links": null
119 | }
120 | ],
121 | "title": "Video Combine - VHS",
122 | "properties": {
123 | "Node name for S&R": "VHS_VideoCombine"
124 | },
125 | "widgets_values": {
126 | "frame_rate": 24,
127 | "loop_count": 0,
128 | "filename_prefix": "luma_",
129 | "format": "video/h264-mp4",
130 | "pix_fmt": "yuv420p",
131 | "crf": 20,
132 | "save_metadata": true,
133 | "pingpong": false,
134 | "save_output": false,
135 | "videopreview": {
136 | "hidden": false,
137 | "paused": false,
138 | "params": {
139 | "filename": "luma__00003.mp4",
140 | "subfolder": "",
141 | "type": "temp",
142 | "format": "video/h264-mp4",
143 | "frame_rate": 24
144 | },
145 | "muted": false
146 | }
147 | }
148 | },
149 | {
150 | "id": 8,
151 | "type": "VHS_LoadVideoPath",
152 | "pos": [
153 | 1080,
154 | 300
155 | ],
156 | "size": [
157 | 340,
158 | 238
159 | ],
160 | "flags": {},
161 | "order": 2,
162 | "mode": 0,
163 | "inputs": [
164 | {
165 | "name": "meta_batch",
166 | "type": "VHS_BatchManager",
167 | "link": null,
168 | "shape": 7
169 | },
170 | {
171 | "name": "vae",
172 | "type": "VAE",
173 | "link": null,
174 | "shape": 7
175 | },
176 | {
177 | "name": "video",
178 | "type": "STRING",
179 | "link": 12,
180 | "widget": {
181 | "name": "video"
182 | }
183 | }
184 | ],
185 | "outputs": [
186 | {
187 | "name": "IMAGE",
188 | "type": "IMAGE",
189 | "links": [
190 | 8,
191 | 11
192 | ],
193 | "slot_index": 0
194 | },
195 | {
196 | "name": "frame_count",
197 | "type": "INT",
198 | "links": [],
199 | "slot_index": 1
200 | },
201 | {
202 | "name": "audio",
203 | "type": "AUDIO",
204 | "links": null
205 | },
206 | {
207 | "name": "video_info",
208 | "type": "VHS_VIDEOINFO",
209 | "links": null
210 | }
211 | ],
212 | "title": "Load Video (Path) - VHS",
213 | "properties": {
214 | "Node name for S&R": "VHS_LoadVideoPath"
215 | },
216 | "widgets_values": {
217 | "video": "",
218 | "force_rate": 0,
219 | "force_size": "Disabled",
220 | "custom_width": 512,
221 | "custom_height": 512,
222 | "frame_load_cap": 0,
223 | "skip_first_frames": 0,
224 | "select_every_nth": 1,
225 | "videopreview": {
226 | "hidden": false,
227 | "paused": true,
228 | "params": {
229 | "force_rate": 0,
230 | "frame_load_cap": 0,
231 | "skip_first_frames": 0,
232 | "select_every_nth": 1,
233 | "filename": "",
234 | "type": "path",
235 | "format": "video/"
236 | },
237 | "muted": false
238 | }
239 | }
240 | },
241 | {
242 | "id": 5,
243 | "type": "Amazon Bedrock - Luma AI Ray Video",
244 | "pos": [
245 | 640,
246 | 140
247 | ],
248 | "size": [
249 | 400,
250 | 360
251 | ],
252 | "flags": {},
253 | "order": 0,
254 | "mode": 0,
255 | "inputs": [],
256 | "outputs": [
257 | {
258 | "name": "STRING",
259 | "type": "STRING",
260 | "links": [
261 | 14
262 | ],
263 | "slot_index": 0
264 | }
265 | ],
266 | "properties": {
267 | "Node name for S&R": "Amazon Bedrock - Luma AI Ray Video"
268 | },
269 | "widgets_values": [
270 | "A high-contrast, mature anime sequence of Yuji Itadori, explicitly styled like Ghost in the Shell 1995 anime. His pink hair and strong physique rendered in that iconic 90s anime cel-shading technique with sharp shadows and detailed muscle definition. @style classic 90s anime, Ghost in the Shell aesthetic with muted colors and dramatic shadow planes. His cursed energy appears as ethereal, digital glitches similar to the Major's thermoptic camouflage effects. The scene shows him in a brutalist concrete environment with deep puddles reflecting neon signs, heavy industrial atmosphere with steam vents and power lines crossing overhead. His Jujutsu Tech uniform reimagined with tactical urban gear and utility pouches like Section 9 operatives wear. Heavy film grain overlay and those signature deep shadow pools of 90s anime create a gritty, serious atmosphere while his determined eyes reflect the neon glow from above",
271 | "16:9",
272 | "720p",
273 | "9s",
274 | "comfyui-workflow-outputs",
275 | "False"
276 | ]
277 | }
278 | ],
279 | "links": [
280 | [
281 | 8,
282 | 8,
283 | 0,
284 | 9,
285 | 0,
286 | "IMAGE"
287 | ],
288 | [
289 | 11,
290 | 8,
291 | 0,
292 | 11,
293 | 0,
294 | "IMAGE"
295 | ],
296 | [
297 | 12,
298 | 7,
299 | 0,
300 | 8,
301 | 2,
302 | "STRING"
303 | ],
304 | [
305 | 14,
306 | 5,
307 | 0,
308 | 7,
309 | 0,
310 | "STRING"
311 | ]
312 | ],
313 | "groups": [],
314 | "config": {},
315 | "extra": {
316 | "ds": {
317 | "scale": 1,
318 | "offset": {
319 | "0": -590.8984375,
320 | "1": -48.78125
321 | }
322 | },
323 | "ue_links": []
324 | },
325 | "version": 0.4
326 | }
--------------------------------------------------------------------------------
/workflows/text2vid_nova_reel.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 17,
3 | "last_link_id": 17,
4 | "nodes": [
5 | {
6 | "id": 9,
7 | "type": "PreviewImage",
8 | "pos": [
9 | 1080,
10 | 600
11 | ],
12 | "size": [
13 | 340,
14 | 246
15 | ],
16 | "flags": {},
17 | "order": 3,
18 | "mode": 0,
19 | "inputs": [
20 | {
21 | "name": "images",
22 | "type": "IMAGE",
23 | "link": 8
24 | }
25 | ],
26 | "outputs": [],
27 | "properties": {
28 | "Node name for S&R": "PreviewImage"
29 | },
30 | "widgets_values": []
31 | },
32 | {
33 | "id": 7,
34 | "type": "ShowText|pysssss",
35 | "pos": [
36 | 1080,
37 | 140
38 | ],
39 | "size": [
40 | 340,
41 | 100
42 | ],
43 | "flags": {},
44 | "order": 1,
45 | "mode": 0,
46 | "inputs": [
47 | {
48 | "name": "text",
49 | "type": "STRING",
50 | "link": 17,
51 | "widget": {
52 | "name": "text"
53 | }
54 | }
55 | ],
56 | "outputs": [
57 | {
58 | "name": "STRING",
59 | "type": "STRING",
60 | "links": [
61 | 12
62 | ],
63 | "slot_index": 0,
64 | "shape": 6
65 | }
66 | ],
67 | "properties": {
68 | "Node name for S&R": "ShowText|pysssss"
69 | },
70 | "widgets_values": [
71 | "",
72 | "/home/user/ComfyUI/output/2025-02-16_15-45-45_73a3j2ki0ql4/73a3j2ki0ql4.mp4"
73 | ]
74 | },
75 | {
76 | "id": 8,
77 | "type": "VHS_LoadVideoPath",
78 | "pos": [
79 | 1080,
80 | 300
81 | ],
82 | "size": [
83 | 340,
84 | 238
85 | ],
86 | "flags": {},
87 | "order": 2,
88 | "mode": 0,
89 | "inputs": [
90 | {
91 | "name": "meta_batch",
92 | "type": "VHS_BatchManager",
93 | "link": null,
94 | "shape": 7
95 | },
96 | {
97 | "name": "vae",
98 | "type": "VAE",
99 | "link": null,
100 | "shape": 7
101 | },
102 | {
103 | "name": "video",
104 | "type": "STRING",
105 | "link": 12,
106 | "widget": {
107 | "name": "video"
108 | }
109 | }
110 | ],
111 | "outputs": [
112 | {
113 | "name": "IMAGE",
114 | "type": "IMAGE",
115 | "links": [
116 | 8,
117 | 11
118 | ],
119 | "slot_index": 0
120 | },
121 | {
122 | "name": "frame_count",
123 | "type": "INT",
124 | "links": [],
125 | "slot_index": 1
126 | },
127 | {
128 | "name": "audio",
129 | "type": "AUDIO",
130 | "links": null
131 | },
132 | {
133 | "name": "video_info",
134 | "type": "VHS_VIDEOINFO",
135 | "links": null
136 | }
137 | ],
138 | "title": "Load Video (Path) - VHS",
139 | "properties": {
140 | "Node name for S&R": "VHS_LoadVideoPath"
141 | },
142 | "widgets_values": {
143 | "video": "",
144 | "force_rate": 0,
145 | "force_size": "Disabled",
146 | "custom_width": 512,
147 | "custom_height": 512,
148 | "frame_load_cap": 0,
149 | "skip_first_frames": 0,
150 | "select_every_nth": 1,
151 | "videopreview": {
152 | "hidden": false,
153 | "paused": true,
154 | "params": {
155 | "force_rate": 0,
156 | "frame_load_cap": 0,
157 | "skip_first_frames": 0,
158 | "select_every_nth": 1,
159 | "filename": "",
160 | "type": "path",
161 | "format": "video/"
162 | },
163 | "muted": false
164 | }
165 | }
166 | },
167 | {
168 | "id": 11,
169 | "type": "VHS_VideoCombine",
170 | "pos": [
171 | 1460,
172 | 140
173 | ],
174 | "size": [
175 | 680,
176 | 695.25
177 | ],
178 | "flags": {},
179 | "order": 4,
180 | "mode": 0,
181 | "inputs": [
182 | {
183 | "name": "images",
184 | "type": "IMAGE",
185 | "link": 11
186 | },
187 | {
188 | "name": "audio",
189 | "type": "AUDIO",
190 | "link": null,
191 | "shape": 7
192 | },
193 | {
194 | "name": "meta_batch",
195 | "type": "VHS_BatchManager",
196 | "link": null,
197 | "shape": 7
198 | },
199 | {
200 | "name": "vae",
201 | "type": "VAE",
202 | "link": null,
203 | "shape": 7
204 | }
205 | ],
206 | "outputs": [
207 | {
208 | "name": "Filenames",
209 | "type": "VHS_FILENAMES",
210 | "links": null
211 | }
212 | ],
213 | "title": "Video Combine - VHS",
214 | "properties": {
215 | "Node name for S&R": "VHS_VideoCombine"
216 | },
217 | "widgets_values": {
218 | "frame_rate": 24,
219 | "loop_count": 0,
220 | "filename_prefix": "nova_reel_",
221 | "format": "video/h264-mp4",
222 | "pix_fmt": "yuv420p",
223 | "crf": 20,
224 | "save_metadata": true,
225 | "pingpong": false,
226 | "save_output": false,
227 | "videopreview": {
228 | "hidden": false,
229 | "paused": false,
230 | "params": {
231 | "filename": "nova_reel__00001.mp4",
232 | "subfolder": "",
233 | "type": "temp",
234 | "format": "video/h264-mp4",
235 | "frame_rate": 24
236 | },
237 | "muted": false
238 | }
239 | }
240 | },
241 | {
242 | "id": 17,
243 | "type": "Amazon Bedrock - Nova Reel Video",
244 | "pos": [
245 | 640,
246 | 140
247 | ],
248 | "size": [
249 | 400,
250 | 300
251 | ],
252 | "flags": {},
253 | "order": 0,
254 | "mode": 0,
255 | "inputs": [
256 | {
257 | "name": "image",
258 | "type": "IMAGE",
259 | "link": null,
260 | "shape": 7
261 | }
262 | ],
263 | "outputs": [
264 | {
265 | "name": "STRING",
266 | "type": "STRING",
267 | "links": [
268 | 17
269 | ],
270 | "slot_index": 0
271 | }
272 | ],
273 | "properties": {
274 | "Node name for S&R": "Amazon Bedrock - Nova Reel Video"
275 | },
276 | "widgets_values": [
277 | "A high-contrast, mature anime sequence of Yuji Itadori, explicitly styled like Ghost in the Shell 1995 anime. His pink hair and strong physique rendered in that iconic 90s anime cel-shading technique with sharp shadows and detailed muscle definition. @style classic 90s anime, Ghost in the Shell aesthetic with muted colors and dramatic shadow planes. His cursed energy appears as ethereal, digital glitches similar to the Major's thermoptic camouflage effects.",
278 | "1280x720",
279 | 582249213,
280 | "randomize",
281 | "comfyui-workflow-outputs"
282 | ]
283 | }
284 | ],
285 | "links": [
286 | [
287 | 8,
288 | 8,
289 | 0,
290 | 9,
291 | 0,
292 | "IMAGE"
293 | ],
294 | [
295 | 11,
296 | 8,
297 | 0,
298 | 11,
299 | 0,
300 | "IMAGE"
301 | ],
302 | [
303 | 12,
304 | 7,
305 | 0,
306 | 8,
307 | 2,
308 | "STRING"
309 | ],
310 | [
311 | 17,
312 | 17,
313 | 0,
314 | 7,
315 | 0,
316 | "STRING"
317 | ]
318 | ],
319 | "groups": [],
320 | "config": {},
321 | "extra": {
322 | "ds": {
323 | "scale": 1,
324 | "offset": {
325 | "0": -368.74609375,
326 | "1": 6.12109375
327 | }
328 | },
329 | "ue_links": []
330 | },
331 | "version": 0.4
332 | }
--------------------------------------------------------------------------------