├── .gitignore
├── README.md
├── __init__.py
├── node
│   └── translator.py
└── ui.jpg

/.gitignore:
--------------------------------------------------------------------------------
__pycache__
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# ComfyUI-Prompt-Translator
A ComfyUI plugin that automatically translates Chinese prompts into English with a local language model.
It is built on [facebook/mbart-large-50-many-to-many-mmt](https://huggingface.co/facebook/mbart-large-50-many-to-many-mmt) and uses the Hugging Face Transformers library to perform the translation.
The plugin does not rely on an online translation service; once the translation model has been downloaded, it works fully offline.

## Installing the plugin
```
cd ComfyUI/custom_nodes
git clone https://github.com/MofaAI/ComfyUI-Prompt-Translator.git
```

## Downloading the language model
```
pip install -U huggingface_hub hf_transfer
export HF_ENDPOINT=https://hf-mirror.com
huggingface-cli download --resume-download facebook/mbart-large-50-many-to-many-mmt
```
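## Quick check (optional)
To confirm the download worked and the model runs fully offline, you can try a minimal sketch like the one below outside of ComfyUI. It mirrors the loading code in `node/translator.py`; the sample Chinese prompt is only an illustration.
```
# Standalone sanity check: load the downloaded model and translate one Chinese prompt to English.
from transformers import MBartForConditionalGeneration, MBart50TokenizerFast

model_id = "facebook/mbart-large-50-many-to-many-mmt"
model = MBartForConditionalGeneration.from_pretrained(model_id)
tokenizer = MBart50TokenizerFast.from_pretrained(model_id)
tokenizer.src_lang = "zh_CN"  # source language: Chinese

encoded = tokenizer("海边,日出", return_tensors="pt")
generated = model.generate(
    **encoded,
    forced_bos_token_id=tokenizer.lang_code_to_id["en_XX"],  # target language: English
)
print(tokenizer.batch_decode(generated, skip_special_tokens=True)[0])
```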
## Screenshot
![screenshot](ui.jpg)
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
from .node.translator import *

__version__ = "1.0.0"

# Node classes exposed to ComfyUI.
NODE_CLASS_MAPPINGS = {
    "PromptTextTranslation": PromptTextTranslation,
}

# Display name shown in the ComfyUI node menu ("文本翻译" = "Text Translation").
NODE_DISPLAY_NAME_MAPPINGS = {
    "PromptTextTranslation": "文本翻译",
}

__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS']
--------------------------------------------------------------------------------
/node/translator.py:
--------------------------------------------------------------------------------
import re

from transformers import MBartForConditionalGeneration, MBart50TokenizerFast

# The model and tokenizer are loaded once when the node module is imported;
# the first load can take a while because the checkpoint is large.
model = MBartForConditionalGeneration.from_pretrained("facebook/mbart-large-50-many-to-many-mmt")
tokenizer = MBart50TokenizerFast.from_pretrained("facebook/mbart-large-50-many-to-many-mmt")
tokenizer.src_lang = "zh_CN"


def translate(text):
    """Translate a Chinese prompt to English; on failure, return the input unchanged."""
    try:
        encoded = tokenizer(text, return_tensors="pt")
        generated_tokens = model.generate(
            **encoded,
            forced_bos_token_id=tokenizer.lang_code_to_id["en_XX"]
        )
        return tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)[0]
    except Exception as err:
        print("Prompt translation error: " + str(err))
        return text


def contains_chinese(text):
    """Return True if the text contains any character in the common Chinese range."""
    pattern = re.compile(r'[\u4e00-\u9fa5]')
    return bool(pattern.search(text))


class PromptTextTranslation:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "text_trans": ("STRING", {"multiline": True, "default": "海边,日出"}),
                "text_normal": ("STRING", {"multiline": True}),
                "trans_switch": (["enabled", "disabled"],),
            },
        }

    RETURN_TYPES = ("STRING",)
    FUNCTION = "translation"
    CATEGORY = "utils"

    def translation(self, text_trans, text_normal, trans_switch):
        # The ComfyUI frontend may send the literal string "undefined" for empty widgets.
        if text_trans == "undefined":
            text_trans = ""
        if text_normal == "undefined":
            text_normal = ""

        print("prompt: ", text_trans, text_normal)

        # Only translate when the switch is enabled and the text actually contains Chinese.
        if trans_switch == "enabled" and contains_chinese(text_trans):
            target_text = translate(text_trans)
        else:
            target_text = text_trans

        print("translated: " + target_text)

        # Append the untranslated part, then normalize Chinese punctuation and stray spaces.
        output_text = ", ".join(filter(None, [target_text, text_normal]))
        for old, new in (("，", ","), ("。", ","), ("　", " "), (" ,", ","), (",,", ",")):
            output_text = output_text.replace(old, new)

        print("output: " + output_text)

        return (output_text,)
--------------------------------------------------------------------------------
/ui.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MofaAI/ComfyUI-Prompt-Translator/cf31713673150e97f1ae835af0a95eb0133b5470/ui.jpg
--------------------------------------------------------------------------------