├── SubtitleTranslate - ollama英译中.ico
├── README.md
├── LICENSE
└── SubtitleTranslate - ollama英译中.as
/SubtitleTranslate - ollama英译中.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yxyxyz6/PotPlayer_ollama_Translate/HEAD/SubtitleTranslate - ollama英译中.ico
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | The V3 English-to-Chinese version has been released; just download the zip from Releases. It resolves the variable-name conflict with Felix3322's plugin.
2 | The V2 English-to-Chinese version has been released; just download the zip from Releases. It improves the prompt and passes the surrounding context to the model.
3 | This plugin is a modified version of the project
4 | https://github.com/Felix3322/PotPlayer_Chatgpt_Translate
5 | by Felix3322.
6 | Download the zip and extract it into the potplayer\Extension\Subtitle\Translate folder.
7 | I use the wangshenzhi/gemma2-9b-chinese-chat:latest model; to use a different model,
8 | open the SubtitleTranslate - ollama.as file in Notepad and change the model name there (see the snippet below).
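
For reference, the default model is defined near the top of the script; this line (taken from the .as file in this repo) is the one to edit, pointing it at any model you have pulled locally with Ollama:

    string DEFAULT_MODEL_NAME = "wangshenzhi/gemma2-9b-chinese-chat:latest";

The model name can also be entered in the plugin's configuration dialog inside PotPlayer; the script checks the entered name against the models reported by the local Ollama server.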
9 | Video tutorials (in Chinese):
10 | Bilibili - 复变的兔子洞: https://www.bilibili.com/video/BV1XBqyYLE7y/?spm_id_from=333.1387.favlist.content.click&vd_source=d9367c081bd4aa6b79e1f7590071a87e
11 | Bilibili - 恩赐Cadou: https://www.bilibili.com/video/BV1vcCgYNEZV/?spm_id_from=333.1007.top_right_bar_window_history.content.click&vd_source=d9367c081bd4aa6b79e1f7590071a87e
12 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Felix
4 | Copyright (c) 2024 yxyxyz6
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
23 |
--------------------------------------------------------------------------------
/SubtitleTranslate - ollama英译中.as:
--------------------------------------------------------------------------------
1 | /*
2 | Real-time subtitle translation for PotPlayer using ollama API
3 | */
4 |
5 | // Plugin information functions
6 | string GetTitle() {
7 | return "{$CP949=로컬 AI 번역$}{$CP950=本地 AI 翻譯$}{$CP0=ollama 英译中$}";
8 | }
9 |
10 | string GetVersion() {
11 | return "1.6";
12 | }
13 |
14 | string GetDesc() {
15 | return "{$CP949=로컬 AI를 사용한 실시간 자막 번역$}{$CP950=使用本地 AI 的實時字幕翻譯$}{$CP0=Real-time subtitle translation using Local AI$}";
16 | }
17 |
18 | string GetLoginTitle() {
19 | return "{$CP949=로컬 AI 모델 구성$}{$CP950=本地 AI 模型配置$}{$CP0=Local AI Model Configuration$}";
20 | }
21 |
22 | string GetLoginDesc() {
23 | return "{$CP949=모델 이름을 입력하십시오 (예: wangshenzhi/gemma2-9b-chinese-chat:latest 또는 isotr0py/sakura-13b-qwen2beta-v0.10pre0-q6_k:latest).$}{$CP950=請輸入模型名稱(例如 wangshenzhi/gemma2-9b-chinese-chat:latest 或 isotr0py/sakura-13b-qwen2beta-v0.10pre0-q6_k:latest)。$}{$CP0=Please enter the model name (e.g., wangshenzhi/gemma2-9b-chinese-chat:latest or isotr0py/sakura-13b-qwen2beta-v0.10pre0-q6_k:latest).$}";
24 | }
25 |
26 | string GetUserText() {
27 | return "{$CP949=모델 이름 (현재: " + selected_model + ")$}{$CP950=模型名稱 (目前: " + selected_model + ")$}{$CP0=Model Name (Current: " + selected_model + ")$}";
28 | }
29 |
30 | string GetPasswordText() {
31 | return "{$CP949=API 키:$}{$CP950=API 金鑰:$}{$CP0=API Key:$}";
32 | }
33 |
34 | // Global variables
35 | string DEFAULT_MODEL_NAME = "wangshenzhi/gemma2-9b-chinese-chat:latest";
36 | string api_key = "";
37 | string selected_model = DEFAULT_MODEL_NAME; // defaults to the first model
38 | string UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64)";
39 | string api_url = "http://127.0.0.1:11434/v1/chat/completions"; // local Ollama API endpoint
40 | string api_url_base = "http://127.0.0.1:11434";
41 | string context = "";
42 |
43 | // List of supported languages
44 | array<string> LangTable =
45 | {
46 | "Auto", "af", "sq", "am", "ar", "hy", "az", "eu", "be", "bn", "bs", "bg", "ca",
47 | "ceb", "ny", "zh-CN",
48 | "zh-TW", "co", "hr", "cs", "da", "nl", "en", "eo", "et", "tl", "fi", "fr",
49 | "fy", "gl", "ka", "de", "el", "gu", "ht", "ha", "haw", "he", "hi", "hmn", "hu", "is", "ig", "id", "ga", "it", "ja", "jw", "kn", "kk", "km",
50 | "ko", "ku", "ky", "lo", "la", "lv", "lt", "lb", "mk", "ms", "mg", "ml", "mt", "mi", "mr", "mn", "my", "ne", "no", "ps", "fa", "pl", "pt",
51 | "pa", "ro", "ru", "sm", "gd", "sr", "st", "sn", "sd", "si", "sk", "sl", "so", "es", "su", "sw", "sv", "tg", "ta", "te", "th", "tr", "uk",
52 | "ur", "uz", "vi", "cy", "xh", "yi", "yo", "zu"
53 | };
54 |
55 | // Get the list of source languages
56 | array<string> GetSrcLangs() {
57 | array<string> ret = LangTable;
58 | return ret;
59 | }
60 |
61 | // Get the list of target languages
62 | array<string> GetDstLangs() {
63 | array<string> ret = LangTable;
64 | return ret;
65 | }
66 |
67 | // Login interface, used to enter the model name and API Key
68 | string ServerLogin(string User, string Pass) {
69 | // Trim leading and trailing whitespace
70 | selected_model = User.Trim();
71 | api_key = Pass.Trim();
72 |
73 | selected_model.MakeLower();
74 |
75 | array<string> names = GetOllamaModelNames();
76 |
77 | // Check whether the model name is empty and whether it is an available model
78 | if (selected_model.empty()) {
79 | HostPrintUTF8("{$CP949=모델 이름이 입력되지 않았습니다. 유효한 모델 이름을 입력하십시오.$}{$CP950=模型名稱未輸入,請輸入有效的模型名稱。$}{$CP0=Model name not entered. Please enter a valid model name.$}\n");
80 | selected_model = DEFAULT_MODEL_NAME; // fall back to the default model
81 | }
82 |
83 | int modelscount = names.size();
84 | if (modelscount == 0){
85 | return "Ollama未返回有效的模型名称数据,请确认Ollama是否已运行或已有下载的模型。Ollama did not return valid model name data. Please confirm whether Ollama is running or has any downloaded models.";
86 | }
87 | bool matched = false;
88 | for (int i = 0; i < modelscount; i++){
89 | if (selected_model == names[i]){
90 | matched = true;
91 | break;
92 | }
93 | }
94 | if (!matched){
95 | HostPrintUTF8("{$CP949=지원되지 않는 모델입니다. 지원되는 모델을 입력하십시오.$}{$CP950=不支援的模型,請輸入支援的模型。$}{$CP0=Unsupported model. Please enter a supported model.$}\n");
96 | return "未从Ollama中找到模型:" + selected_model;
97 | }
98 |
99 | // Save the settings to temporary storage
100 | HostSaveString("api_key_ollama", api_key);
101 | HostSaveString("selected_model_ollama", selected_model);
102 |
103 | HostPrintUTF8("{$CP949=API 키와 모델 이름이 성공적으로 설정되었습니다.$}{$CP950=API 金鑰與模型名稱已成功配置。$}{$CP0=API Key and model name successfully configured.$}\n");
104 | return "200 ok";
105 | }
106 |
107 | // Logout interface, clears the model name and API Key
108 | void ServerLogout() {
109 | api_key = "";
110 | selected_model = DEFAULT_MODEL_NAME; // reset to the default model
111 | HostSaveString("api_key_ollama", "");
112 | HostSaveString("selected_model_ollama", selected_model);
113 | HostPrintUTF8("{$CP949=성공적으로 로그아웃되었습니다.$}{$CP950=已成功登出。$}{$CP0=Successfully logged out.$}\n");
114 | }
115 |
116 | // JSON string escaping function
117 | string JsonEscape(const string &in input) {
118 | string output = input;
119 | output.replace("\\", "\\\\");
120 | output.replace("\"", "\\\"");
121 | output.replace("\n", "\\n");
122 | output.replace("\r", "\\r");
123 | output.replace("\t", "\\t");
124 | return output;
125 | }
126 |
127 | // Translation function
128 | string Translate(string Text, string &in SrcLang, string &in DstLang) {
129 | // Load the model name from temporary storage
130 | selected_model = HostLoadString("selected_model_ollama", "wangshenzhi/gemma2-9b-chinese-chat:latest");
131 |
132 | if (DstLang.empty() || DstLang == "{$CP949=자동 감지$}{$CP950=自動檢測$}{$CP0=Auto Detect$}") {
133 | HostPrintUTF8("{$CP949=목표 언어가 지정되지 않았습니다.$}{$CP950=目標語言未指定。$}{$CP0=Target language not specified.$}\n");
134 | return "";
135 | }
136 |
137 | string UNICODE_RLE = "\u202B";
138 |
139 | if (SrcLang.empty() || SrcLang == "{$CP949=자동 감지$}{$CP950=自動檢測$}{$CP0=Auto Detect$}") {
140 | SrcLang = "";
141 | }
142 |
143 | // Build the prompt
144 | string prompt = "你是一名专业翻译。请将以下字幕文本";
145 | if (!SrcLang.empty()) {
146 | prompt += "从" + SrcLang;
147 | }
148 | prompt += "翻译成" + DstLang + "。请注意:\n";
149 | prompt += "1. 保持原文的语气和风格\n";
150 | prompt += "2. 确保译文与上下文保持连贯\n";
151 | prompt += "3. 只需提供译文,无需解释\n\n";
152 |
153 | if (!context.empty()) {
154 | prompt += "上下文参考:\n'''\n" + context + "\n'''\n\n";
155 | }
156 | prompt += "待翻译文本:\n'''\n" + Text + "\n'''";
157 |
158 | // JSON-escape the prompt
159 | string escapedPrompt = JsonEscape(prompt);
160 |
161 | // Build the request body
162 | string requestData = "{\"model\":\"" + selected_model + "\",\"messages\":[{\"role\":\"user\",\"content\":\"" + escapedPrompt + "\"}]}";
163 | string headers = "Content-Type: application/json";
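// For reference (illustrative comment, not part of the original script): the request body
// assembled above follows the OpenAI-compatible chat completions format that Ollama serves
// at /v1/chat/completions, e.g.
//   {"model":"wangshenzhi/gemma2-9b-chinese-chat:latest",
//    "messages":[{"role":"user","content":"你是一名专业翻译。..."}]}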
164 |
165 | // Send the request
166 | string response = HostUrlGetString(api_url, UserAgent, headers, requestData);
167 | if (response.empty()) {
168 | HostPrintUTF8("{$CP949=번역 요청이 실패했습니다.$}{$CP950=翻譯請求失敗。$}{$CP0=Translation request failed.$}\n");
169 | return "";
170 | }
171 |
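// The expected response is an OpenAI-style chat completion object, roughly
// (illustrative sketch, not part of the original script):
//   {"choices":[{"message":{"role":"assistant","content":"<translated text>"}}], ...}
// Only choices[0]["message"]["content"] is read below.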
172 | // Parse the response
173 | JsonReader Reader;
174 | JsonValue Root;
175 | if (!Reader.parse(response, Root)) {
176 | HostPrintUTF8("{$CP949=API 응답을 분석하지 못했습니다.$}{$CP950=無法解析 API 回應。$}{$CP0=Failed to parse API response.$}\n");
177 | return "";
178 | }
179 |
180 | JsonValue choices = Root["choices"];
181 | if (choices.isArray() && choices[0]["message"]["content"].isString()) {
182 | string translatedText = choices[0]["message"]["content"].asString();
183 | if (DstLang == "fa" || DstLang == "ar" || DstLang == "he") {
184 | translatedText = UNICODE_RLE + translatedText;
185 | }
186 | SrcLang = "UTF8";
187 | DstLang = "UTF8";
188 | return translatedText;
189 | }
190 |
191 | HostPrintUTF8("{$CP949=번역이 실패했습니다.$}{$CP950=翻譯失敗。$}{$CP0=Translation failed.$}\n");
192 | return "";
193 | }
194 |
195 | // Plugin initialization
196 | void OnInitialize() {
197 | HostPrintUTF8("{$CP949=ollama 번역 플러그인이 로드되었습니다.$}{$CP950=ollama 翻譯插件已加載。$}{$CP0=ollama translation plugin loaded.$}\n");
198 | // Load the model name and API Key from temporary storage (if saved), using the new key names
199 | api_key = HostLoadString("api_key_ollama", "");
200 | selected_model = HostLoadString("selected_model_ollama", "wangshenzhi/gemma2-9b-chinese-chat:latest");
201 | if (!api_key.empty()) {
202 | HostPrintUTF8("{$CP949=저장된 API 키와 모델 이름이 로드되었습니다.$}{$CP950=已加載保存的 API 金鑰與模型名稱。$}{$CP0=Saved API Key and model name loaded.$}\n");
203 | }
204 | }
205 |
206 | // Plugin finalization
207 | void OnFinalize() {
208 | HostPrintUTF8("{$CP949=ollama 번역 플러그인이 언로드되었습니다.$}{$CP950=ollama 翻譯插件已卸載。$}{$CP0=ollama translation plugin unloaded.$}\n");
209 | }
210 |
211 | array<string> GetOllamaModelNames(){
212 | string url = api_url_base + "/api/tags";
213 | string headers = "Content-Type: application/json";
214 | string resp = HostUrlGetString(url,UserAgent, headers, "");
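// Ollama's /api/tags endpoint lists the locally installed models; the response is roughly
// (illustrative sketch, not part of the original script):
//   {"models":[{"name":"wangshenzhi/gemma2-9b-chinese-chat:latest", ...}, ...]}
// The code below reads root["models"] and collects the model names from these entries.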
215 | JsonReader reader;
216 | JsonValue root;
217 | if (!reader.parse(resp, root)){
218 | HostPrintUTF8("{$CP0=Failed to parse the list of the deployed models from Ollama.$}{$CP936=解析Ollama本地部署模型名称列表时失败:无法解析json。$}");
219 | array<string> empty;
220 | return empty;
221 | }
222 | JsonValue models = root["models"];
223 | int count = models.size();
224 | int i = 0;
225 | array<string> res;
226 | for (i=0 ; i