├── .gitignore
├── Assets
├── Logo.png
├── LogoWide.png
├── SmallLogo.png
└── Logo.svg
├── LlmExtension
├── Assets
│ ├── LlmExtensionLogo.png
│ ├── BadgeLogo.scale-400.png
│ ├── StoreLogo.scale-400.png
│ ├── SmallLlmExtensionLogo.png
│ ├── Square44x44Logo.scale-400.png
│ ├── Square71x71Logo.scale-400.png
│ ├── Wide310x150Logo.scale-400.png
│ ├── Square150x150Logo.scale-400.png
│ └── Square310x310Logo.scale-400.png
├── LlmExtension_TemporaryKey.pfx
├── Properties
│ ├── launchSettings.json
│ └── PublishProfiles
│ │ ├── win-x64.pubxml
│ │ └── win-arm64.pubxml
├── LlmExtensionCommandsProvider.cs
├── app.manifest
├── LlmExtension.cs
├── Program.cs
├── Package.appxmanifest
├── LlmExtension.csproj
├── Package.StoreAssociation.xml
└── Pages
│ └── LlmExtensionPage.cs
├── nuget.config
├── Directory.Build.props
├── LICENSE
├── README.md
├── Directory.Packages.props
└── LlmExtension.sln
/.gitignore:
--------------------------------------------------------------------------------
1 | /LlmExtension/bin
2 | /LlmExtension/obj
3 | /Backup
4 | /.vs
5 | **.user
--------------------------------------------------------------------------------
/Assets/Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/Assets/Logo.png
--------------------------------------------------------------------------------
/Assets/LogoWide.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/Assets/LogoWide.png
--------------------------------------------------------------------------------
/Assets/SmallLogo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/Assets/SmallLogo.png
--------------------------------------------------------------------------------
/LlmExtension/Assets/LlmExtensionLogo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/LlmExtension/Assets/LlmExtensionLogo.png
--------------------------------------------------------------------------------
/LlmExtension/LlmExtension_TemporaryKey.pfx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/LlmExtension/LlmExtension_TemporaryKey.pfx
--------------------------------------------------------------------------------
/LlmExtension/Assets/BadgeLogo.scale-400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/LlmExtension/Assets/BadgeLogo.scale-400.png
--------------------------------------------------------------------------------
/LlmExtension/Assets/StoreLogo.scale-400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/LlmExtension/Assets/StoreLogo.scale-400.png
--------------------------------------------------------------------------------
/LlmExtension/Assets/SmallLlmExtensionLogo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/LlmExtension/Assets/SmallLlmExtensionLogo.png
--------------------------------------------------------------------------------
/LlmExtension/Assets/Square44x44Logo.scale-400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/LlmExtension/Assets/Square44x44Logo.scale-400.png
--------------------------------------------------------------------------------
/LlmExtension/Assets/Square71x71Logo.scale-400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/LlmExtension/Assets/Square71x71Logo.scale-400.png
--------------------------------------------------------------------------------
/LlmExtension/Assets/Wide310x150Logo.scale-400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/LlmExtension/Assets/Wide310x150Logo.scale-400.png
--------------------------------------------------------------------------------
/LlmExtension/Assets/Square150x150Logo.scale-400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/LlmExtension/Assets/Square150x150Logo.scale-400.png
--------------------------------------------------------------------------------
/LlmExtension/Assets/Square310x310Logo.scale-400.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LioQing/llm-extension-for-cmd-pal/HEAD/LlmExtension/Assets/Square310x310Logo.scale-400.png
--------------------------------------------------------------------------------
/LlmExtension/Properties/launchSettings.json:
--------------------------------------------------------------------------------
1 | {
2 | "profiles": {
3 | "LlmExtension (Package)": {
4 | "commandName": "MsixPackage",
5 | "doNotLaunchApp": true
6 | },
7 | "LlmExtension (Unpackaged)": {
8 | "commandName": "Project"
9 | }
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/nuget.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/Directory.Build.props:
--------------------------------------------------------------------------------
1 |
2 |
3 | x64;ARM64
4 | true
5 | Recommended
6 | <_SkipUpgradeNetAnalyzersNuGetWarning>true
7 | direct
8 | $(Platform)
9 |
10 |
11 |
--------------------------------------------------------------------------------
/LlmExtension/Properties/PublishProfiles/win-x64.pubxml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 | FileSystem
8 | x64
9 | win-x64
10 | bin\$(Configuration)\$(TargetFramework)\$(RuntimeIdentifier)\publish\
11 | true
12 | False
13 | True
14 |
15 |
16 |
--------------------------------------------------------------------------------
/LlmExtension/Properties/PublishProfiles/win-arm64.pubxml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 | FileSystem
8 | ARM64
9 | win-arm64
10 | bin\$(Configuration)\$(TargetFramework)\$(RuntimeIdentifier)\publish\
11 | true
12 | False
13 | True
14 |
15 |
16 |
--------------------------------------------------------------------------------
/LlmExtension/LlmExtensionCommandsProvider.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation
2 | // The Microsoft Corporation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | using Microsoft.CommandPalette.Extensions;
6 | using Microsoft.CommandPalette.Extensions.Toolkit;
7 |
8 | namespace LlmExtension;
9 |
/// <summary>
/// Top-level command provider: registers the LLM chat page with the
/// Command Palette host.
/// </summary>
public partial class LlmExtensionCommandsProvider : CommandProvider
{
    // Fixed set of commands surfaced at the palette's top level.
    private readonly ICommandItem[] _commands;

    public LlmExtensionCommandsProvider()
    {
        DisplayName = "LLM Extension";
        Icon = IconHelpers.FromRelativePath("Assets\\LlmExtensionLogo.png");
        _commands = [new CommandItem(new LlmExtensionPage())];
    }

    /// <summary>Returns the extension's top-level command items.</summary>
    public override ICommandItem[] TopLevelCommands() => _commands;
}
29 |
--------------------------------------------------------------------------------
/LlmExtension/app.manifest:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
10 |
11 |
12 |
13 |
14 |
15 |
16 | PerMonitorV2
17 |
18 |
19 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 Lio Qing
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/LlmExtension/LlmExtension.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation
2 | // The Microsoft Corporation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | using System;
6 | using System.Runtime.InteropServices;
7 | using System.Threading;
8 | using Microsoft.CommandPalette.Extensions;
9 |
10 | namespace LlmExtension;
11 |
/// <summary>
/// COM-visible extension object handed to the Command Palette host. Serves
/// the commands provider and signals the hosting process on disposal.
/// </summary>
[Guid("19b8294e-b4a6-4e5c-a763-55fee4a09a1c")]
public sealed partial class LlmExtension : IExtension, IDisposable
{
    // Signalled in Dispose() so the blocked main thread can unwind and exit.
    private readonly ManualResetEvent _extensionDisposedEvent;

    private readonly LlmExtensionCommandsProvider _provider = new();

    public LlmExtension(ManualResetEvent extensionDisposedEvent) =>
        _extensionDisposedEvent = extensionDisposedEvent;

    /// <summary>
    /// Returns the provider for the requested type, or null when this
    /// extension does not implement it (only Commands is supported).
    /// </summary>
    public object? GetProvider(ProviderType providerType) =>
        providerType == ProviderType.Commands ? _provider : null;

    public void Dispose() => _extensionDisposedEvent.Set();
}
35 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | # LLM Extension for Command Palette
6 |
7 | This is an extension for PowerToys Command Palette that allows you to chat with a large language model (LLM) directly.
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 | > [!NOTE]
21 | >
22 | > The Microsoft Store page has been taken down because of crashes caused by a bug that is still under investigation, but you may still install the extension directly from Command Palette or from the [Release page](https://github.com/LioQing/llm-extension-for-cmd-pal/releases).
23 |
24 | ## Demo Video
25 |
26 | https://github.com/user-attachments/assets/d8b707a9-b086-470f-8d01-7508091ebd9d
27 |
28 | It currently supports the following APIs:
29 |
30 | - [Ollama](https://ollama.com/)
31 | - [OpenAI](https://platform.openai.com/docs/overview) (and any other compatible API, such as [Docker Model Runner](https://docs.docker.com/model-runner/))
32 | - [Azure OpenAI](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/overview)
33 | - [Google](https://aistudio.google.com/)
34 | - [Mistral](https://console.mistral.ai/)
35 |
36 | ## Setup
37 |
38 | There is a [YouTube playlist with setup tutorials for different services](https://www.youtube.com/playlist?list=PLtpfYcxJV4LHu0gpKagHWjYR1Lghulnt8).
39 |
--------------------------------------------------------------------------------
/LlmExtension/Program.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation
2 | // The Microsoft Corporation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | using System;
6 | using System.Threading;
7 | using Microsoft.CommandPalette.Extensions;
8 |
9 | namespace LlmExtension;
10 |
/// <summary>
/// Process entry point. When launched by COM activation with
/// -RegisterProcessAsComServer, hosts the extension server and blocks until
/// the single extension instance is disposed by the host.
/// </summary>
public class Program
{
    [MTAThread]
    public static void Main(string[] args)
    {
        try
        {
            if (args.Length > 0 && args[0] == "-RegisterProcessAsComServer")
            {
                using ExtensionServer server = new();
                var extensionDisposedEvent = new ManualResetEvent(false);
                var extensionInstance = new LlmExtension(extensionDisposedEvent);

                // We are instantiating an extension instance once above, and returning it every time the callback in RegisterExtension below is called.
                // This makes sure that only one instance of SampleExtension is alive, which is returned every time the host asks for the IExtension object.
                // If you want to instantiate a new instance each time the host asks, create the new instance inside the delegate.
                server.RegisterExtension(() => extensionInstance);

                // This will make the main thread wait until the event is signalled by the extension class.
                // Since we have single instance of the extension object, we exit as soon as it is disposed.
                extensionDisposedEvent.WaitOne();
            }
            else
            {
                // FIX: corrected the diagnostic message's grammar ("a Extension" -> "an extension").
                Console.WriteLine("Not being launched as an extension... exiting.");
            }
        }
        catch (Exception e)
        {
            Console.WriteLine($"Unhandled exception caught: {e}");
        }
    }
}
--------------------------------------------------------------------------------
/Directory.Packages.props:
--------------------------------------------------------------------------------
1 |
2 |
3 | true
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/LlmExtension.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio Version 17
4 | VisualStudioVersion = 17.13.35507.96
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LlmExtension", "LlmExtension\LlmExtension.csproj", "{79F86DE5-70B1-4EC1-9832-DF428B55E466}"
7 | EndProject
8 | Global
9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
10 | Debug|ARM64 = Debug|ARM64
11 | Debug|x64 = Debug|x64
12 | Debug|x86 = Debug|x86
13 | Release|ARM64 = Release|ARM64
14 | Release|x64 = Release|x64
15 | Release|x86 = Release|x86
16 | EndGlobalSection
17 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
18 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Debug|ARM64.ActiveCfg = Debug|ARM64
19 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Debug|ARM64.Build.0 = Debug|ARM64
20 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Debug|ARM64.Deploy.0 = Debug|ARM64
21 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Debug|x64.ActiveCfg = Debug|x64
22 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Debug|x64.Build.0 = Debug|x64
23 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Debug|x64.Deploy.0 = Debug|x64
24 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Debug|x86.ActiveCfg = Debug|x86
25 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Debug|x86.Build.0 = Debug|x86
26 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Debug|x86.Deploy.0 = Debug|x86
27 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Release|ARM64.ActiveCfg = Release|ARM64
28 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Release|ARM64.Build.0 = Release|ARM64
29 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Release|ARM64.Deploy.0 = Release|ARM64
30 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Release|x64.ActiveCfg = Release|x64
31 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Release|x64.Build.0 = Release|x64
32 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Release|x64.Deploy.0 = Release|x64
33 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Release|x86.ActiveCfg = Release|x86
34 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Release|x86.Build.0 = Release|x86
35 | {79F86DE5-70B1-4EC1-9832-DF428B55E466}.Release|x86.Deploy.0 = Release|x86
36 | EndGlobalSection
37 | GlobalSection(SolutionProperties) = preSolution
38 | HideSolutionNode = FALSE
39 | EndGlobalSection
40 | GlobalSection(ExtensibilityGlobals) = postSolution
41 | SolutionGuid = {CEDBC581-5818-4350-BC8A-A1ECE687D357}
42 | EndGlobalSection
43 | EndGlobal
44 |
--------------------------------------------------------------------------------
/LlmExtension/Package.appxmanifest:
--------------------------------------------------------------------------------
1 |
2 |
3 |
10 |
11 |
15 |
17 |
18 |
19 | LLM Extension for Command Palette
20 | Lio Qing
21 | Assets\StoreLogo.png
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
37 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
--------------------------------------------------------------------------------
/LlmExtension/LlmExtension.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 | WinExe
4 | LlmExtension
5 | app.manifest
6 |
7 | 10.0.22621.57
8 | net9.0-windows10.0.22621.0
9 | 10.0.19041.0
10 | 10.0.19041.0
11 | win-x64;win-arm64
12 |
13 | win-$(Platform).pubxml
14 | true
15 | enable
16 |
17 |
18 |
19 |
20 |
21 |
22 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
55 |
56 | true
57 |
58 |
59 |
60 |
61 | true
62 | 2
63 |
64 | IL2081
65 | True
66 | LlmExtension_TemporaryKey.pfx
67 | True
68 | False
69 | SHA256
70 | False
71 | False
72 | True
73 | Auto
74 | x64|arm64
75 | 0
76 |
77 |
78 |
79 |
80 |
81 |
--------------------------------------------------------------------------------
/Assets/Logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
231 |
--------------------------------------------------------------------------------
/LlmExtension/Package.StoreAssociation.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | CN=087E4036-09C3-4A81-B69A-ADD29D778B50
4 | Lio Qing
5 | MSA
6 | http://www.w3.org/2001/04/xmlenc#sha256
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 |
251 |
252 |
253 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 |
267 |
268 |
269 |
270 |
271 |
272 |
273 |
274 |
275 |
276 |
277 |
278 |
279 |
280 |
281 |
282 |
283 |
284 |
285 |
286 |
287 |
288 |
289 |
290 |
291 |
292 |
293 |
294 |
295 |
296 |
297 |
298 |
299 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 |
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 |
316 |
317 |
318 |
319 |
320 |
321 |
322 |
323 |
324 |
325 |
326 |
327 |
328 |
329 |
330 |
331 |
332 |
333 |
334 |
335 |
336 |
337 |
338 |
339 |
340 |
341 |
342 |
343 |
344 |
345 |
346 |
347 |
348 |
349 |
350 |
351 |
352 |
353 |
354 |
355 |
356 |
357 |
358 |
359 |
360 |
361 |
362 |
363 |
364 | LioQing.LLMExtensionforCommandPalette
365 |
366 | LLM Extension for Command Palette
367 |
368 |
369 |
370 | LioQing.CommandPalette-OllamaExtension
371 |
372 |
373 |
--------------------------------------------------------------------------------
/LlmExtension/Pages/LlmExtensionPage.cs:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation
2 | // The Microsoft Corporation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | using System;
6 | using System.Collections.Generic;
7 | using System.IO;
8 | using System.Linq;
9 | using System.Net.Http;
10 | using System.Text.Json;
11 | using Microsoft.CommandPalette.Extensions;
12 | using Microsoft.CommandPalette.Extensions.Toolkit;
13 | using Windows.Foundation;
14 | using System.ClientModel;
15 | using Microsoft.SemanticKernel.ChatCompletion;
16 | using OllamaSharp;
17 | using Microsoft.SemanticKernel.Connectors.OpenAI;
18 | using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
19 | using Microsoft.SemanticKernel.Connectors.Google;
20 | using Microsoft.SemanticKernel.Connectors.MistralAI;
21 | using Microsoft.SemanticKernel.Connectors.Ollama;
22 | using Microsoft.SemanticKernel;
23 | using System.Collections;
24 |
25 | namespace LlmExtension;
26 |
27 | internal sealed partial class LlmExtensionPage : DynamicListPage
28 | {
29 | ///
30 | /// Supported services.
31 | ///
32 | enum Service
33 | {
34 | Ollama,
35 | OpenAI,
36 | AzureOpenAI,
37 | Google,
38 | Mistral,
39 | }
40 |
41 | ///
42 | /// User and assistant chat conversation message.
43 | ///
44 | class ChatMessage
45 | {
46 | public required string User { get; set; }
47 | public required string Assistant { get; set; }
48 | }
49 |
50 | ///
51 | /// User configurations.
52 | ///
53 | class Config
54 | {
55 | public Service Service { get; set; } = Service.Ollama;
56 | public string Url { get; set; } = "";
57 | public string? Model { get; set; }
58 | public bool Details { get; set; } = true;
59 | public string? ApiKey { get; set; }
60 | public string System { get; set; } =
61 | "You are an AI assistant, you should provide help to the user with their query. " +
62 | "Act as if you are human with emotions, use fun and relaxing tone when appropriate. " +
63 | "However, remember you are still an AI model. Respond in Markdown format if necessary.";
64 |
65 | public int History { get; set; } = 6;
66 | public bool Debug { get; set; }
67 | public int MaxTokens { get; set; } = 4096;
68 | public float Temperature { get; set; } = 0.8f;
69 | public float TopP { get; set; } = 1.0f;
70 | }
71 |
72 | ///
73 | /// Client containing the configuration and the service.
74 | ///
75 | class Client
76 | {
77 | public required Config Config;
78 | private IChatCompletionService? Service { get; set; }
79 | public IEnumerable MissingConfigs { get; private set; } = new List();
80 |
81 | public void ReinitializeService()
82 | {
83 | MissingConfigs = MissingConfigsForService();
84 |
85 | if (MissingConfigs.Any())
86 | {
87 | return;
88 | }
89 |
90 | Service = CreateService(Config);
91 | }
92 |
93 | async public IAsyncEnumerable Chat(IEnumerable messages)
94 | {
95 | if (Service == null) yield break;
96 |
97 | // Build history
98 | ChatHistory history = [];
99 | history.AddSystemMessage(Config.System);
100 | foreach (var message in messages.Reverse())
101 | {
102 | if (!string.IsNullOrEmpty(message.User))
103 | {
104 | history.AddUserMessage(message.User);
105 |
106 | if (history.Count >= Config.History + 1)
107 | {
108 | break;
109 | }
110 | }
111 |
112 | if (!string.IsNullOrEmpty(message.Assistant))
113 | {
114 | history.AddAssistantMessage(message.Assistant);
115 |
116 | if (history.Count >= Config.History + 1)
117 | {
118 | break;
119 | }
120 | }
121 | }
122 |
123 | // Build execution settings
124 | #pragma warning disable SKEXP0070
125 | PromptExecutionSettings settings = Config.Service switch
126 | {
127 | LlmExtensionPage.Service.Ollama => new OllamaPromptExecutionSettings()
128 | {
129 | Temperature = Config.Temperature,
130 | TopP = Config.TopP,
131 | },
132 | LlmExtensionPage.Service.OpenAI => new OpenAIPromptExecutionSettings()
133 | {
134 | MaxTokens = Config.MaxTokens,
135 | Temperature = Config.Temperature,
136 | TopP = Config.TopP,
137 | },
138 | LlmExtensionPage.Service.AzureOpenAI => new AzureOpenAIPromptExecutionSettings()
139 | {
140 | MaxTokens = Config.MaxTokens,
141 | Temperature = Config.Temperature,
142 | TopP = Config.TopP,
143 | },
144 | LlmExtensionPage.Service.Google => new GeminiPromptExecutionSettings()
145 | {
146 | MaxTokens = Config.MaxTokens,
147 | Temperature = Config.Temperature,
148 | TopP = Config.TopP,
149 | },
150 | LlmExtensionPage.Service.Mistral => new MistralAIPromptExecutionSettings()
151 | {
152 | MaxTokens = Config.MaxTokens,
153 | Temperature = Config.Temperature,
154 | TopP = Config.TopP,
155 | },
156 | };
157 |
158 | await foreach (var content in Service.GetStreamingChatMessageContentsAsync(history, settings))
159 | {
160 | if (string.IsNullOrEmpty(content.Content))
161 | {
162 | continue;
163 | }
164 |
165 | yield return content.Content;
166 | }
167 | }
168 |
169 | public IEnumerable MissingConfigsForService()
170 | {
171 | var missingFields = new List();
172 |
173 | if (string.IsNullOrEmpty(Config.Model)) missingFields.Add("model");
174 |
175 | if (Config.Service == LlmExtensionPage.Service.Ollama)
176 | {
177 | if (string.IsNullOrEmpty(Config.Url)) missingFields.Add("url");
178 | }
179 | else if (Config.Service == LlmExtensionPage.Service.AzureOpenAI)
180 | {
181 | if (string.IsNullOrEmpty(Config.ApiKey)) missingFields.Add("apikey");
182 | }
183 | else if (Config.Service == LlmExtensionPage.Service.Google)
184 | {
185 | if (string.IsNullOrEmpty(Config.ApiKey)) missingFields.Add("apikey");
186 | }
187 | else if (Config.Service == LlmExtensionPage.Service.Mistral)
188 | {
189 | if (string.IsNullOrEmpty(Config.ApiKey)) missingFields.Add("apikey");
190 | }
191 |
192 | return missingFields;
193 | }
194 |
195 | public static IChatCompletionService CreateService(Config config)
196 | {
197 | return config.Service switch
198 | {
199 | LlmExtensionPage.Service.Ollama => CreateOllamaService(config),
200 | LlmExtensionPage.Service.OpenAI => CreateOpenAIService(config),
201 | LlmExtensionPage.Service.AzureOpenAI => CreateAzureOpenAIService(config),
202 | LlmExtensionPage.Service.Google => CreateGoogleService(config),
203 | LlmExtensionPage.Service.Mistral => CreateMistralService(config),
204 | _ => throw new ArgumentException("Invalid service name")
205 | };
206 | }
207 |
208 | #pragma warning disable SKEXP0001
209 | private static IChatCompletionService CreateOllamaService(Config config) => new OllamaApiClient(
210 | uriString: config.Url,
211 | defaultModel: config.Model ?? ""
212 | ).AsChatCompletionService();
213 |
214 | private static OpenAIChatCompletionService CreateOpenAIService(Config config) => new(
215 | config.Model ?? " ",
216 | new OpenAI.OpenAIClient(
217 | new ApiKeyCredential(config.ApiKey ?? " "),
218 | new OpenAI.OpenAIClientOptions() { Endpoint = string.IsNullOrEmpty(config.Url) ? null : new Uri(config.Url) }));
219 |
220 | private static AzureOpenAIChatCompletionService CreateAzureOpenAIService(Config config) => new(
221 | config.Model ?? " ",
222 | new Azure.AI.OpenAI.AzureOpenAIClient(new Uri(config.Url), new ApiKeyCredential(config.ApiKey ?? " ")));
223 |
224 | #pragma warning disable SKEXP0070
225 | private static GoogleAIGeminiChatCompletionService CreateGoogleService(Config config) => new(
226 | config.Model ?? " ",
227 | config.ApiKey ?? " ");
228 |
229 | private static MistralAIChatCompletionService CreateMistralService(Config config) => new(
230 | config.Model ?? " ",
231 | config.ApiKey ?? " ",
232 | string.IsNullOrEmpty(config.Url) ? null : new Uri(config.Url));
233 | }
234 |
235 | private static readonly string ConfigPath = "%USERPROFILE%\\.config\\LlmExtensionForCmdPal\\config.json";
236 | private static readonly string HelpMessage =
237 | "## What is this extension\n" +
238 | "\n" +
239 | "This extension allows you to chat with LLM models, including Ollama, OpenAI, Azure OpenAI, Google Gemini, or Mistral, either hosted by yourself or by a third party.\n" +
240 | "\n" +
241 | "## How to use this extension\n" +
242 | "\n" +
243 | "There are two ways to interact with this extension:\n" +
244 | "\n" +
245 | "1. **Chat**: Type your message in the search box and press enter to send it to the LLM model. The model will respond with a message.\n" +
246 | "\n" +
247 | "2. **Commands**: Type `/` in the search box to see a list of available commands. You can use these commands to configure the extension, such as setting the model, API key, and other options.\n" +
248 | "\n" +
249 | "## Setting up the extension\n" +
250 | "\n" +
251 | "1. **Setup your LLM model**: You need to have a LLM model running on your local machine or a server. Visit the supported service providers respective websites for more information on how to set them up.\n" +
252 | "\n" +
253 | "2. **Configure the extension**: Use commands to setup the connection with the LLM model.\n" +
254 | "\n" +
255 | " - `/service `: Set the API service to call (Ollama, OpenAI, AzureOpenAI, Google, or Mistral).\n" +
256 | " - For other services with OpenAPI compatible APIs such as Docker Model Runner, use the `OpenAI` service.\n" +
257 | " - `/url `: Set the server URL.\n" +
258 | " - For services that do not need a URL, you may enter `/url ` without any arguments.\n" +
259 | " - For Ollama, usually `http://localhost:11434/`.\n" +
260 | " - For AzureOpenAI, usually `https://your-id.openai.azure.com/`.\n" +
261 | " - For Docker Model Runner, usually `https://localhost:your-port/engines/llama.cpp/v1/` with OpenAI service.\n" +
262 | " - `/model `: Set the model to use.\n" +
263 | " - For AzureOpenAI, this is the deployment name.\n" +
264 | " - `/apikey `: Set the API key.\n" +
265 | " - For Ollama, this is not applicable.\n" +
266 | "\n" +
267 | "3. **Send a message**: Type your message in the search box and press enter to send it to the LLM model. The model will respond with a message.\n" +
268 | "\n" +
269 | "## YouTube Playlist\n" +
270 | "\n" +
271 | "There is also a YouTube playlist introducing the usage of the extension! Run the command `/videos` to open the link.";
272 |
273 | private readonly Client _client;
274 | private readonly IList _messages;
275 | private readonly IDictionary, Func<(string, string)>?, string?, Action?)> _commands;
276 |
277 | private (string, ListItem)[] _commandsMemo;
278 | private ListItem[] _messagesMemo;
279 |
    /// <summary>
    /// Initializes the chat page: loads the persisted configuration from disk,
    /// (re)builds the API service, seeds the history with one empty exchange,
    /// registers all slash commands, and builds the memoized item lists.
    /// </summary>
    public LlmExtensionPage()
    {
        Icon = IconHelpers.FromRelativePath("Assets\\SmallLlmExtensionLogo.png");
        Title = "LLM Chat";
        Name = "Chat with LLM";
        PlaceholderText = "Type here to chat, or start with '/' to use commands";
        ShowDetails = true;

        // Read the saved config (or write defaults) and connect the service.
        _client = new Client()
        {
            Config = ReadConfig()
        };
        _client.ReinitializeService();
        // Slot 0 is the empty exchange the next user message goes into.
        _messages = [new() { User = "", Assistant = "" }];
        // Command tuple layout: (arg placeholder, subtitle factory, details
        // factory, URL/folder to open, action). NOTE(review): generic type
        // arguments appear stripped by the dump; verify against the original.
        _commands = new Dictionary, Func<(string, string)>?, string?, Action?)>()
        {
            { "service", (
                "",
                () => $"Set the API service to call (currently: {_client.Config.Service})",
                null,
                null,
                (sender, args, opts) => {
                    // Case-insensitive mapping from the typed name to the enum.
                    Service? service = opts.ToLowerInvariant() switch
                    {
                        "ollama" => Service.Ollama,
                        "openai" => Service.OpenAI,
                        "azureopenai" => Service.AzureOpenAI,
                        "google" => Service.Google,
                        "mistral" => Service.Mistral,
                        _ => null,
                    };

                    if (service == null)
                    {
                        ErrorToast($"Invalid service '{opts}', expected one of 'Ollama', 'OpenAI', 'AzureOpenAI', 'Google', 'Mistral'");
                        return;
                    }
                    else
                    {
                        _client.Config.Service = service ?? throw new ArgumentException();
                    }

                    RefreshConfigs();
                })
            },
            // Clears the history and re-seeds the single empty exchange.
            { "clear", (null, () => $"Clear message history ({_messages.Count} message" + (_messages.Count > 1 ? "s" : "") + ")", null, null, (sender, args, opts) => {
                _messages.Clear();
                _messages.Add(new() { User = "", Assistant = "" });
                SearchText = "";
                RaiseItemsChanged();
            }) },
            { "url", ("", () => $"Set server URL (current: {_client.Config.Url})", null, null, (sender, args, opts) =>
            {
                _client.Config.Url = opts;
                RefreshConfigs();
            }) },
            { "model", ("", () => $"Set the model to use (current: {_client.Config.Model})", null, null, (sender, args, opts) =>
            {
                _client.Config.Model = opts;
                RefreshConfigs();
            }) },
            { "apikey", ("", () => $"Set the API key (Ollama not applicable)", null, null, (sender, args, opts) =>
            {
                _client.Config.ApiKey = opts;
                RefreshConfigs();
            }) },
            // Toggles the details pane shown next to each response.
            { "detail", (null, () => $"Toggle full detailed response on the side (current: {_client.Config.Details})", null, null, (sender, args, opts) =>
            {
                _client.Config.Details = !_client.Config.Details;
                RefreshConfigs();
            }) },
            { "system", (
                "",
                () => $"Set the system prompt",
                () => ("Current System Prompt", _client.Config.System),
                null,
                (sender, args, opts) =>
                {
                    _client.Config.System = opts;
                    RefreshConfigs();
                }
            ) },
            { "history", (
                "",
                () => $"Set the message history count (current: {_client.Config.History})",
                null,
                null,
                (sender, args, opts) =>
                {
                    try
                    {
                        var count = int.Parse(opts);

                        if (count <= 0)
                        {
                            ErrorToast($"Invalid history count {count}, expected positive integer");
                            return;
                        }

                        _client.Config.History = count;

                        RefreshConfigs();
                    }
                    // In debug mode the filter is false, so the raw
                    // FormatException propagates for full diagnostics.
                    catch (FormatException) when (!_client.Config.Debug)
                    {
                        ErrorToast($"Invalid history count '{opts}', expected integer");
                        return;
                    }
                }
            ) },
            { "maxtokens", (
                "",
                () => $"Set the maximum token count (Ollama not applicable, current: {_client.Config.MaxTokens})",
                null,
                null,
                (sender, args, opts) =>
                {
                    try
                    {
                        var count = int.Parse(opts);

                        if (count <= 0)
                        {
                            ErrorToast($"Invalid token count {count}, expected positive integer");
                            return;
                        }

                        _client.Config.MaxTokens = count;

                        RefreshConfigs();
                    }
                    catch (FormatException) when (!_client.Config.Debug)
                    {
                        ErrorToast($"Invalid token count '{opts}', expected integer");
                        return;
                    }
                }
            ) },
            { "temperature", (
                "",
                () => $"Set the model temperature, indicating creativeness (current: {_client.Config.Temperature})",
                null,
                null,
                (sender, args, opts) =>
                {
                    try
                    {
                        var value = float.Parse(opts);

                        if (value < 0.0 || value > 1.0)
                        {
                            ErrorToast($"Invalid temperature {value}, expected floating point number between 0.0 and 1.0");
                            return;
                        }

                        _client.Config.Temperature = value;

                        RefreshConfigs();
                    }
                    catch (FormatException) when (!_client.Config.Debug)
                    {
                        ErrorToast($"Invalid temperature '{opts}', expected floating point number");
                        return;
                    }
                }
            ) },
            { "topp", (
                "",
                () => $"Set the model top P, indicating randomness (current: {_client.Config.TopP})",
                null,
                null,
                (sender, args, opts) =>
                {
                    try
                    {
                        var value = float.Parse(opts);

                        if (value < 0.0 || value > 1.0)
                        {
                            ErrorToast($"Invalid top P {value}, expected floating point number between 0.0 and 1.0");
                            return;
                        }

                        _client.Config.TopP = value;

                        RefreshConfigs();
                    }
                    catch (FormatException) when (!_client.Config.Debug)
                    {
                        ErrorToast($"Invalid top P '{opts}', expected floating point number");
                        return;
                    }
                }
            ) },
            // Detail-only command: opens a markdown page with the help text.
            { "help", (
                null,
                () => $"Help message on usage of this extension",
                () => ("Help message", HelpMessage),
                null,
                null
            ) },
            // URL-only command: opens the tutorial playlist in the browser.
            { "videos", (
                null,
                () => $"Open the YouTube playlist of introducing the usage of this extension",
                null,
                "https://www.youtube.com/playlist?list=PLtpfYcxJV4LHu0gpKagHWjYR1Lghulnt8",
                null
            ) },
            // Opens the config folder (same location ConfigPath resolves to).
            { "config", (
                null,
                () => $"Open the folder containing the configuration file",
                null,
                Environment.ExpandEnvironmentVariables("%USERPROFILE%\\.config\\LlmExtensionForCmdPal"),
                null
            ) },
            { "debug", (null, () => $"Toggle printing of the complete exception (current: {_client.Config.Debug})", null, null, (sender, args, opts) =>
            {
                _client.Config.Debug = !_client.Config.Debug;
                RefreshConfigs();
            }) },
            { "reset", (null, () => "Reset all settings", null, null, (sender, args, opts) =>
            {
                _client.Config = new Config();
                RefreshConfigs();
            }) },
        };

        UpdateCommandsMemo();
        UpdateMessagesMemo();
    }
510 |
511 | public override void UpdateSearchText(string oldSearch, string newSearch) {
512 | if (string.IsNullOrEmpty(oldSearch) != string.IsNullOrEmpty(newSearch) || oldSearch.StartsWith('/') != newSearch.StartsWith('/'))
513 | {
514 | if (!IsLoading)
515 | {
516 | _messages[0].User = newSearch;
517 | }
518 | UpdateMessagesMemo();
519 | RaiseItemsChanged();
520 | } else if (newSearch.StartsWith('/'))
521 | {
522 | RaiseItemsChanged();
523 | }
524 | }
525 |
526 | public override IListItem[] GetItems()
527 | {
528 | try
529 | {
530 | if (!IsLoading && SearchText.StartsWith('/'))
531 | {
532 | var commandText = SearchText[1..];
533 |
534 | if (commandText.Contains(' '))
535 | {
536 | commandText = commandText[..commandText.IndexOf(' ')];
537 | }
538 |
539 | return _commandsMemo
540 | .OrderByDescending(c => c.Item1.Zip(commandText).TakeWhile((pair) => pair.First == pair.Second).Count())
541 | .Select(c => c.Item2)
542 | .ToArray();
543 | }
544 |
545 | if (_client.MissingConfigs.Any())
546 | {
547 | return [new ListItem(new NoOpCommand()) {
548 | Icon = new IconInfo("\u26A0"),
549 | Title = $"Configuration incomplete for {_client.Config.Service}",
550 | Subtitle = $"The missing configurations are: {string.Join(", ", _client.MissingConfigs)}" }];
551 | }
552 |
553 | return _messagesMemo;
554 | }
555 | catch (Exception ex)
556 | {
557 | ErrorToast(_client.Config.Debug ? ex.ToString() : "An unexpected error occurred");
558 | IsLoading = false;
559 |
560 | return [];
561 | }
562 | }
563 |
564 | private void SaveConfig()
565 | {
566 | var path = Environment.ExpandEnvironmentVariables(ConfigPath);
567 | var dir = Path.GetDirectoryName(path)!;
568 |
569 | if (!Directory.Exists(dir))
570 | Directory.CreateDirectory(dir);
571 |
572 | var json = JsonSerializer.Serialize(_client.Config, new JsonSerializerOptions() { WriteIndented = true });
573 | File.WriteAllText(path, json);
574 | }
575 |
576 | private static Config ReadConfig()
577 | {
578 | var path = Environment.ExpandEnvironmentVariables(ConfigPath);
579 | var dir = Path.GetDirectoryName(path)!;
580 |
581 | if (!Directory.Exists(dir))
582 | Directory.CreateDirectory(dir);
583 |
584 | if (!File.Exists(path))
585 | {
586 | var defaultConfig = new Config();
587 | var json = JsonSerializer.Serialize(defaultConfig, new JsonSerializerOptions() { WriteIndented = true });
588 | File.WriteAllText(path, json);
589 | return defaultConfig;
590 | }
591 |
592 | try
593 | {
594 | var fileContent = File.ReadAllText(path);
595 | var config = JsonSerializer.Deserialize(fileContent) ?? new Config();
596 | return config;
597 | }
598 | catch (JsonException)
599 | {
600 | var defaultConfig = new Config();
601 | var json = JsonSerializer.Serialize(defaultConfig, new JsonSerializerOptions() { WriteIndented = true });
602 | File.WriteAllText(path, json);
603 | return defaultConfig;
604 | }
605 | }
606 |
    /// <summary>
    /// Applies a configuration change end-to-end: rebuilds the service client,
    /// persists the config to disk, clears the search box, refreshes both
    /// memoized item lists, then notifies the host to re-render.
    /// </summary>
    private void RefreshConfigs()
    {
        _client.ReinitializeService();
        SaveConfig();
        SearchText = "";
        UpdateCommandsMemo();
        UpdateMessagesMemo();
        RaiseItemsChanged();
    }
616 |
617 | private void UpdateCommandsMemo()
618 | {
619 | _commandsMemo = _commands.Select(c =>
620 | {
621 | ICommand command = c.Value.Item5 != null
622 | ? SendMessageCommand.CreateCommand(c.Key, c.Value, () => SearchText, _client.Config.Debug)
623 | : c.Value.Item3 != null
624 | ? new MarkdownPage(c.Value.Item3.Invoke().Item1, c.Value.Item3.Invoke().Item2)
625 | : c.Value.Item4 != null
626 | ? new OpenUrlCommand(c.Value.Item4)
627 | : new NoOpCommand();
628 |
629 | var item = new ListItem(command)
630 | {
631 | Title = $"/{c.Key}",
632 | Subtitle = c.Value.Item2.Invoke()
633 | };
634 |
635 | if (c.Value.Item1 != null)
636 | {
637 | item.Title += $" {c.Value.Item1}";
638 | }
639 |
640 | if (c.Value.Item3 != null)
641 | {
642 | var details = c.Value.Item3.Invoke();
643 | item.Details = new Details()
644 | {
645 | Title = details.Item1,
646 | Body = details.Item2,
647 | };
648 | }
649 |
650 | return (c.Key, item);
651 | })
652 | .ToArray();
653 | }
654 |
    /// <summary>
    /// Rebuilds the memoized chat list items from the message history. The
    /// in-progress exchange (empty Assistant) maps to either nothing (no user
    /// text yet) or a "Press enter to send" item wired to start the request;
    /// completed exchanges map to items opening a detailed-response page.
    /// </summary>
    private void UpdateMessagesMemo()
    {
        _messagesMemo = _messages.SelectMany(m =>
        {
            if (string.IsNullOrEmpty(m.Assistant))
            {
                if (string.IsNullOrEmpty(m.User))
                {
                    // Nothing typed yet: contribute no item.
                    return [];
                }
                else
                {
                    var command = new SendMessageCommand() { Debug = _client.Config.Debug };
                    // While a request is already streaming, the item is shown
                    // but deliberately has no handler attached.
                    if (!IsLoading)
                    {
                        command.SendMessage += async (sender, args) =>
                        {
                            try
                            {
                                // Snapshot the search box into the pending
                                // message, then stream the reply chunk by
                                // chunk, re-rendering after each chunk.
                                _messages[0].User = SearchText;

                                IsLoading = true;

                                await foreach (var response in _client.Chat(_messages))
                                {
                                    m.Assistant += response;
                                    RaiseItemsChanged();
                                }

                                // Success: clear the box and open a fresh
                                // in-progress exchange at index 0.
                                SearchText = "";
                                _messages.Insert(0, new() { User = "", Assistant = "" });
                                RaiseItemsChanged();
                            }
                            // Non-debug: condense transport/API failures into
                            // a friendly hint about server/model/key setup.
                            catch (Exception ex) when (!_client.Config.Debug && (ex is HttpRequestException || ex is ClientResultException))
                            {
                                ErrorToast(
                                    $"Error calling API over HTTP, is there a '{_client.Config.Service}' server running and accepting connections " +
                                    $"at '{_client.Config.Url}' with model '{_client.Config.Model}'? Or perhaps the API key is incorrect?"
                                );
                            }
                            // Debug: dump the exception's Data dictionary too.
                            catch (HttpOperationException ex) when (_client.Config.Debug)
                            {
                                var dataString = "";
                                foreach (DictionaryEntry item in ex.Data)
                                {
                                    dataString += $"{item.Key}: {item.Value}\n";
                                }

                                if (!string.IsNullOrEmpty(dataString))
                                {
                                    dataString = "\n" + dataString;
                                }

                                ErrorToast($"An HTTP error occurred: {ex.Message} with inner exception {ex.InnerException}{dataString}");
                            }
                            catch (Exception ex)
                            {
                                ErrorToast(_client.Config.Debug ? ex.ToString() : "An error occurred when attempting to chat with LLM");
                            }
                            finally
                            {
                                // Always unblock the UI, even on failure.
                                IsLoading = false;
                            }
                        };
                    }

                    return [new ListItem(command) { Title = "Press enter to send" }];
                }
            }
            else
            {
                // Completed exchange: clicking opens the full response page.
                var item = new ListItem(new DetailedResponsePage(m.User, m.Assistant))
                {
                    Title = m.Assistant,
                    Subtitle = m.User,
                };

                // Optional side pane with the full prompt/response text.
                if (_client.Config.Details)
                {
                    item.Details = new Details()
                    {
                        Title = m.User,
                        Body = m.Assistant,
                    };
                }

                return [item];
            }
        }).ToArray();
    }
745 |
746 | internal static void ErrorToast(string message)
747 | {
748 | new ToastStatusMessage(new StatusMessage()
749 | {
750 | Message = message,
751 | State = MessageState.Error,
752 | })
753 | {
754 | Duration = 10000,
755 | }.Show();
756 | }
757 | }
758 |
internal sealed partial class SendMessageCommand : InvokableCommand
{
    // Raised on Invoke; handlers do the actual work (send a chat message or
    // run a slash-command action). NOTE(review): the event's generic type
    // arguments appear stripped by the dump — verify against the original.
    public event TypedEventHandler? SendMessage;

    // When true, error toasts include the full exception text.
    public required bool Debug { get; set; }

    /// <summary>
    /// Builds the command for a slash-command registry entry. On invocation it
    /// re-reads the current search text, verifies it still starts with the
    /// "/{key}" prefix, extracts the trimmed argument string when the entry
    /// declares a placeholder (Item1), and forwards it to the action (Item5).
    /// NOTE(review): the tuple's generic arguments appear stripped by the dump.
    /// </summary>
    public static SendMessageCommand CreateCommand(
        string key,
        (string?, Func, Func<(string, string)>?, string?, Action?) value,
        Func searchTextFunc,
        bool debug)
    {
        var command = new SendMessageCommand() { Debug = debug };
        command.SendMessage += (sender, args) =>
        {
            var searchText = searchTextFunc();
            var opts = "";

            // The search box may have changed since the item was built.
            if (!searchText.StartsWith($"/{key}", StringComparison.InvariantCulture))
            {
                LlmExtensionPage.ErrorToast($"Command '{searchText}' not found");
                return;
            }

            if (value.Item1 != null)
            {
                // An argument is declared: require at least a trailing space
                // after the command word; the (possibly empty) remainder is
                // the argument, trimmed of surrounding whitespace.
                if (searchText.StartsWith($"/{key} ", StringComparison.InvariantCulture))
                {
                    if (searchText.Length > $"/{key} ".Length)
                    {
                        opts = searchText[$"/{key} ".Length..].Trim();
                    }
                }
                else
                {
                    LlmExtensionPage.ErrorToast($"Expected argument '{value.Item1}' for command '/{key}'");
                    return;
                }
            }

            value.Item5?.Invoke(sender, args, opts);
        };

        return command;
    }

    /// <summary>
    /// Fires the SendMessage handlers (with a null result argument), surfacing
    /// any handler exception as an error toast, and keeps the palette open.
    /// </summary>
    public override ICommandResult Invoke()
    {
        CommandResult? result = null;

        try
        {
            SendMessage?.Invoke(this, result);
        }
        catch (Exception ex)
        {
            LlmExtensionPage.ErrorToast(Debug ? ex.ToString() : "An error occurred when running the command");
        }
        return CommandResult.KeepOpen();
    }
}
819 |
/// <summary>
/// Content page showing one full prompt/response exchange as markdown.
/// </summary>
internal sealed partial class DetailedResponsePage : ContentPage
{
    // The user's prompt, rendered in bold above the divider.
    public string User { get; set; }

    // The model's full reply, rendered below the divider.
    public string Assistant { get; set; }

    public DetailedResponsePage(string user, string assistant)
    {
        Title = "Detailed Response";
        User = user;
        Assistant = assistant;
    }

    // Bold prompt, horizontal rule, then the reply — as one markdown block.
    public override IContent[] GetContent() =>
        [new MarkdownContent($"**{User}**\n\n---\n\n{Assistant}")];
}
839 |
/// <summary>
/// Generic content page that renders a fixed markdown body under a title.
/// </summary>
internal sealed partial class MarkdownPage : ContentPage
{
    // Raw markdown text rendered by GetContent.
    public string Content { get; set; }

    public MarkdownPage(string title, string content)
    {
        Title = title;
        Content = content;
    }

    // The whole page is a single markdown block.
    public override IContent[] GetContent() => [new MarkdownContent(Content)];
}
--------------------------------------------------------------------------------