├── .devcontainer ├── Dockerfile ├── devcontainer.json └── post-create.sh ├── .gitignore ├── LICENSE ├── README.md ├── images ├── 20LMStudioSearchModel.png ├── 22ServerRunning.png ├── 30RunSimpleDemo.png ├── 35Localhostnetworkerror.png ├── 40chatfulldemo.gif └── SK-LM-Phi2-demo.gif └── src ├── sk-customLLM-customHttpClient ├── Program.cs └── sk-customLLM-customHttpClient.csproj ├── sk-customllm.sln ├── sk-customllm ├── CustomChatCompletionService.cs ├── CustomHttpMessageHandler.cs ├── Models │ ├── ChatMessage.cs │ ├── ChatRequest.cs │ └── ChatResponse.cs └── sk-customllm.csproj ├── sk-ollama-localserver-rpi ├── Program.cs └── sk-ollama-localserver-rpi.csproj ├── sk-ollama-localserver-ubuntu ├── Program.cs └── sk-ollama-localserver-ubuntu.csproj ├── sk-ollamacsharp ├── OllamaChatCompletionService.cs ├── OllamaTextGenerationService.cs └── sk-ollamacsharp.csproj └── sk-phi2-localserver-lmstudio ├── Program.cs └── sk-phi2-localserver-lmstudio.csproj /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | # [Choice] .NET version: 7.0, 7.0-bullseye-slim, 7.0-jammy, 6.0, 6.0-bullseye-slim, 6.0-jammy, 6.0-focal 2 | ARG VARIANT="8.0-jammy" 3 | FROM mcr.microsoft.com/dotnet/sdk:${VARIANT} 4 | 5 | # [Optional] Uncomment this section to install additional OS packages. 6 | # RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ 7 | # && apt-get -y install --no-install-recommends 8 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "DevContainer for .NET", 3 | "build": { 4 | "dockerfile": "./Dockerfile", 5 | "context": ".", 6 | "args": { 7 | // version: 7.0, 7.0-bullseye-slim, 7.0-jammy, 6.0, 6.0-bullseye-slim, 6.0-jammy, 6.0-focal 8 | "VARIANT": "8.0" 9 | } 10 | }, 11 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 12 | "forwardPorts": [ 13 | // ASP.NET Core Web/API App, Blazor App 14 | 5000, 15 | 5001, 16 | // Azure Static Web Apps 17 | 4280 18 | ], 19 | "features": { 20 | // Azure CLI 21 | "ghcr.io/devcontainers/features/azure-cli:1": { 22 | "version": "latest" 23 | }, 24 | // GitHub CLI 25 | "ghcr.io/devcontainers/features/github-cli:1": { 26 | "version": "latest" 27 | }, 28 | // node.js 29 | "ghcr.io/devcontainers/features/node:1": { 30 | // version: 'latest', 'lts', '18', '16', '14' 31 | "version": "lts", 32 | "nodeGypDependencies": true, 33 | "nvmInstallPath": "/usr/local/share/nvm" 34 | }, 35 | // Install common utilities 36 | "ghcr.io/devcontainers/features/common-utils:1": { 37 | "installZsh": true, 38 | "installOhMyZsh": true, 39 | "upgradePackages": true, 40 | "username": "vscode", 41 | "uid": "1000", 42 | "gid": "1000" 43 | } 44 | }, 45 | "overrideFeatureInstallOrder": [ 46 | "ghcr.io/devcontainers/features/common-utils" 47 | ], 48 | // Configure tool-specific properties. 49 | "customizations": { 50 | // Configure properties specific to VS Code. 51 | "vscode": { 52 | // Add the IDs of extensions you want installed when the container is created. 53 | "extensions": [ 54 | "ms-dotnettools.csharp", 55 | "ms-vscode.PowerShell", 56 | "ms-vscode.vscode-node-azure-pack", 57 | "VisualStudioExptTeam.vscodeintellicode", 58 | "github.copilot" 59 | ], 60 | "settings": { 61 | // Uncomment if you want to disable the minimap view 62 | // "editor.minimap.enabled": false, 63 | // Recommended settings for the explorer pane 64 | "explorer.sortOrder": "type", 65 | "explorer.fileNesting.enabled": true, 66 | "explorer.fileNesting.patterns": { 67 | "*.js": "${capture}.js.map", 68 | "*.razor": "${capture}.razor.cs,${capture}.razor.css" 69 | } 70 | } 71 | }, 72 | "codespaces": { 73 | "openFiles": [ 74 | "src/sk-phi2-localserver-lmstudio/Program.cs", 75 | "README.md" 76 | ] 77 | } 78 | }, 79 | // Uncomment if you want to use bash in 'postCreateCommand' after the container is created 80 | "postCreateCommand": "/bin/bash ./.devcontainer/post-create.sh > ~/post-create.log", 81 | // Uncomment if you want to connect other than 'root'. More info: https://aka.ms/vscode-remote/containers/non-root. 82 | "remoteUser": "vscode" 83 | } -------------------------------------------------------------------------------- /.devcontainer/post-create.sh: -------------------------------------------------------------------------------- 1 | ## Install additional apt packages 2 | sudo apt-get update && \ 3 | sudo apt-get install -y dos2unix libsecret-1-0 4 | 5 | ## Configure git 6 | git config --global core.autocrlf input 7 | 8 | ## Enable local HTTPS for .NET 9 | dotnet dev-certs https --trust 10 | 11 | ## Restore .NET packages and build the default solution 12 | dotnet restore && dotnet build 13 | 14 | ## AZURE CLI EXTENSIONS ## 15 | # Uncomment the below to install Azure CLI extensions 16 | # extensions=(account alias deploy-to-azure functionapp subscription webapp) 17 | # for extension in $extensions; 18 | # do 19 | # az extension add --name $extension 20 | # done 21 | 22 | ## AZURE BICEP CLI ## 23 | # Uncomment the below to install Azure Bicep CLI. 24 | # az bicep install 25 | 26 | ## AZURE FUNCTIONS CORE TOOLS ## 27 | # Uncomment the below to install Azure Functions Core Tools. Make sure you have installed node.js 28 | # npm i -g azure-functions-core-tools@4 --unsafe-perm true 29 | 30 | ## Azurite ## 31 | # Uncomment the below to install Azurite. Make sure you have installed node.js 32 | # npm install -g azurite 33 | 34 | ## AZURE STATIC WEB APPS CLI ## 35 | # Uncomment the below to install Azure Static Web Apps CLI. Make sure you have installed node.js 36 | npm install -g @azure/static-web-apps-cli 37 | 38 | ## AZURE DEV CLI ## 39 | # Uncomment the below to install Azure Dev CLI. Make sure you have installed Azure CLI and GitHub CLI 40 | # curl -fsSL https://aka.ms/install-azd.sh | bash 41 | 42 | ## OH-MY-ZSH PLUGINS & THEMES (POWERLEVEL10K) ## 43 | # Uncomment the below to install oh-my-zsh plugins and themes (powerlevel10k) without dotfiles integration 44 | # git clone https://github.com/zsh-users/zsh-completions.git $HOME/.oh-my-zsh/custom/plugins/zsh-completions 45 | # git clone https://github.com/zsh-users/zsh-syntax-highlighting.git $HOME/.oh-my-zsh/custom/plugins/zsh-syntax-highlighting 46 | # git clone https://github.com/zsh-users/zsh-autosuggestions.git $HOME/.oh-my-zsh/custom/plugins/zsh-autosuggestions 47 | 48 | # git clone https://github.com/romkatv/powerlevel10k.git $HOME/.oh-my-zsh/custom/themes/powerlevel10k --depth=1 49 | # ln -s $HOME/.oh-my-zsh/custom/themes/powerlevel10k/powerlevel10k.zsh-theme $HOME/.oh-my-zsh/custom/themes/powerlevel10k.zsh-theme 50 | 51 | ## OH-MY-POSH ## 52 | # Uncomment the below to install oh-my-posh 53 | sudo wget https://github.com/JanDeDobbeleer/oh-my-posh/releases/latest/download/posh-linux-amd64 -O /usr/local/bin/oh-my-posh 54 | sudo chmod +x /usr/local/bin/oh-my-posh 55 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | src/.vs/** 2 | src/sk-phi2-localserver-lmstudio/obj/** 3 | src/sk-phi2-localserver-lmstudio/bin/** 4 | src/sk-customllm/bin/** 5 | src/sk-customllm/obj/** 6 | src/sk-ollama-localserver-ubuntu/bin/** 7 | src/sk-ollama-localserver-ubuntu/obj/** 8 | src/sk-ollama-localserver-rpi/bin/** 9 | src/sk-ollama-localserver-rpi/obj/** 10 | /src/sk-ollamacsharp/bin/Debug/net8.0 11 | /src/sk-ollamacsharp/obj 12 | /src/sk-customLLM-customHttpClient/bin/Debug/net8.0 13 | /src/sk-customLLM-customHttpClient/obj 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 El Bruno 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Using Semantic Kernel with local LLMs 2 | ## Running a local webserver with LMStudio to expose Phi-2 model 3 | 4 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](/LICENSE) 5 | [![Twitter: elbruno](https://img.shields.io/twitter/follow/elbruno.svg?style=social)](https://twitter.com/elbruno) 6 | ![GitHub: elbruno](https://img.shields.io/github/followers/elbruno?style=social) 7 | 8 | ✨ 9 | This is a quickstart for sample to show how to run a SLM (small language model: Phi-2) in local mode with LMStudioAI, and how to interact with the model using SemanticKernel. 10 | 11 | ## Getting started - Quick guide 12 | 13 | 1. **🌐 Start Local Inference Server**: Open [LM Studio](https://lmstudio.ai/) and start the webserver with your favourite LLM. 14 | 15 | 1. **📤 One-click setup**: [Open a new Codespace](https://codespaces.new/elbruno/sk-phi2-localserver-lmstudio), giving you a fully configured cloud developer environment. 16 | 17 | 1. **💬 Change your chat questions**: Update the chat code in `src/sk-phi2-localserver-lmstudio/Program.cs`. 18 | 19 | 1. **▶️ Run, one-click again**: Use VS Code's built-in *Run* command. Check LM Studio logs and app logs to see the model running. 20 | 21 | 1. **🔄 Iterate quickly:** Codespaces updates the server on each save, and VS Code's debugger lets you dig into the code execution. 22 | 23 | ## Configure your environment 24 | Before you get started, make sure you have the following requirements in place: 25 | - [Visual Studio Code](http://aka.ms/vscode) with extensions: 26 | - [C# Extension](https://aka.ms/csharp/vscode) 27 | 28 | - [.NET 8.0 SDK](https://aka.ms/net80) for building and deploying .NET 8 projects. 29 | - [LM Studio](https://lmstudio.ai/) for running a server with local Large Language Models 30 | 31 | 32 | ## Getting Started with [LM Studio](https://lmstudio.ai/) 33 | 34 | [LM Studio](https://lmstudio.ai/) is a desktop application that allows you to run open-source models locally on your computer. You can use LM Studio to discover, download, and chat with models from Hugging Face, or create your own custom models. LM Studio also lets you run a local inference server that mimics the OpenAI API, so you can use any model with your favorite tools and frameworks. LM Studio is available for Mac, Windows, and Linux, and you can download it from their website. 35 | 36 | ![Search for models in LM Studio](/images/20LMStudioSearchModel.png "Search for models in LM Studio") 37 | 38 | 39 | ### Download models locally and run a local inference server with LM Studio 40 | Here are the steps to run a local server with LM Studio 41 | 42 | 1. Launch LM Studio and search for a LLM from **Hugging Face** using the search bar. You can filter the models by compatibility, popularity, or quantization level. 43 | For this demo we will use Phi-2. 44 | 45 | 1. Select a model and click **Download**. You can also view the model card for more information about the model. 46 | 47 | 1. Once the model is downloaded, go to the **Local Server section** and select the model from the drop-down menu. You can also adjust the server settings and parameters as you wish. 48 | 49 | 1. Click **Start Server** to run the model on your local machine. You will see a URL that you can use to access the server from your browser or other applications. 50 | 51 | ***Important:** The server is compatible with the OpenAI API, so you can use the same code and format for your requests and responses.* 52 | 53 | 1. To stop the server, click **Stop Server**. You can also delete the model from your machine if you don’t need it anymore. 54 | 55 | ![Local Inference Server running in LM Studio](/images/22ServerRunning.png "Local Inference Server running in LM Studio") 56 | 57 | ## Phi-2 58 | 59 | Phi-2 is a small language model (SLM) developed by Microsoft Research that has 2.7 billion parameters and demonstrates outstanding reasoning and language understanding capabilities. It was trained on a mix of synthetic and web datasets for natural language processing and coding. It achieves state-of-the-art performance among base language models with less than 13 billion parameters and matches or outperforms models up to 25x larger on complex benchmarks. We can use Phi-2 to generate text, code, or chat with it using the Azure AI Studio or the Hugging Face platform⁴. 😊 60 | 61 | Here are some additional resources related to Phi-2: 62 | 63 | - Phi-2: The surprising power of small language models. https://www.microsoft.com/en-us/research/blog/phi-2-the-surprising-power-of-small-language-models/ 64 | - Microsoft/phi-2 · Hugging Face. https://huggingface.co/microsoft/phi-2 65 | 66 | ## Run Local 67 | 1. Start the LM Studio Local Inference Server running with Phi-2. 68 | 69 | 1. Open `src/sk-phi2-localserver-lmstudio/Program.cs`. 70 | 71 | Press [F5] To Start Debugging. Choose your prefered Debugger. 72 | 73 | 1. Once the project is compiled, the app should be running. 74 | 75 | Check the logs to see the chat interaction. You can also check LM Studio logs to validate the LLM model outpus. 76 | ![Run simple demo](/images/30RunSimpleDemo.png "Run Simple demo") 77 | 78 | ## Run in Codespaces 79 | 80 | 1. Click here to open in GitHub Codespaces 81 | 82 | [![Open in GitHub Codespaces](https://img.shields.io/static/v1?style=for-the-badge&label=GitHub+Codespaces&message=Open&color=lightgrey&logo=github)](https://codespaces.new/elbruno/sk-phi2-localserver-lmstudio) 83 | 84 | 1. This action may take a couple of minutes. Once the Codespaces is initialized, check the Extensions tab and check that all extensions are installed. 85 | 86 | 1. The file `src/sk-phi2-localserver-lmstudio/Program.cs` should be open. If not, open it using the ***Explorer*** option from the Right Sidebar. 87 | 88 | 1. Using the the ***Run and Debug*** option, run the program. Select "C# as the run option". 89 | 90 | 1. Run the app and check the CodeSpaces terminal and the LM Studio logs. 91 | 92 | 93 | ## Advanced chat demo. 94 | 95 | Update the file `src/sk-phi2-localserver-lmstudio/Program.cs` with the following code. This will run a small interactive chat using Phi-2 as the backend model. 96 | 97 | ```csharp 98 | // init chat 99 | var chat = kernel.GetRequiredService(); 100 | var history = new ChatHistory(); 101 | history.AddSystemMessage("You are a useful assistant that replies using a funny style.You answer with short messages. Your name is Goku."); 102 | Console.WriteLine("Hint: type your question or type 'exit' to leave the conversation"); 103 | 104 | // chat loop 105 | while (true) 106 | { 107 | Console.Write("You: "); 108 | var input = Console.ReadLine(); 109 | if (string.IsNullOrEmpty(input) || input.ToLower() == "exit") 110 | break; 111 | history.AddUserMessage(input); 112 | history = (ChatHistory) await chat.GetChatMessageContentsAsync(history); 113 | Console.WriteLine(history[^1].Content); 114 | Console.WriteLine("---"); 115 | } 116 | 117 | Console.WriteLine("Goodbye!"); 118 | ``` 119 | 120 | The running app should be similar to this: 121 | 122 | ![Chat complete demo](/images/40chatfulldemo.gif "Chat complete demo") 123 | 124 | 125 | ## Trouble shooting 126 | 127 | 1. *Important**: If your codespaces can't access the localhost endpoint, you may get an error similar to this one. 128 | 129 | 130 | ![Codespaces can't access localhost error](/images/35Localhostnetworkerror.png "Codespaces can't access localhost error") 131 | 132 | In order to solve this problem, you can use the **[Codespaces Network Bridge]("https://github.com/github/gh-net#codespaces-network-bridge")**. 133 | 134 | The following command will connect the codespaces with your local machine ports: 135 | ```bash 136 | gh net start --your codespace-- 137 | ``` 138 | 139 | ## Author 140 | 141 | 👤 **Bruno Capuano** 142 | 143 | * Website: https://elbruno.com 144 | * Twitter: [@elbruno](https://twitter.com/elbruno) 145 | * Github: [@elbruno](https://github.com/elbruno) 146 | * LinkedIn: [@elbruno](https://linkedin.com/in/elbruno) 147 | 148 | ## 🤝 Contributing 149 | 150 | Contributions, issues and feature requests are welcome! 151 | 152 | Feel free to check [issues page](https://github.com/elbruno/sk-phi2-localserver-lmstudio/issues). 153 | 154 | ## Show your support 155 | 156 | Give a ⭐️ if this project helped you! 157 | 158 | 159 | ## 📝 License 160 | 161 | Copyright © 2024 [Bruno Capuano](https://github.com/elbruno). 162 | 163 | This project is [MIT](/LICENSE) licensed. 164 | 165 | *** -------------------------------------------------------------------------------- /images/20LMStudioSearchModel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elbruno/semantickernel-localLLMs/7a7c96e4f53b8ec1db2544c2e610eed1b683fc69/images/20LMStudioSearchModel.png -------------------------------------------------------------------------------- /images/22ServerRunning.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elbruno/semantickernel-localLLMs/7a7c96e4f53b8ec1db2544c2e610eed1b683fc69/images/22ServerRunning.png -------------------------------------------------------------------------------- /images/30RunSimpleDemo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elbruno/semantickernel-localLLMs/7a7c96e4f53b8ec1db2544c2e610eed1b683fc69/images/30RunSimpleDemo.png -------------------------------------------------------------------------------- /images/35Localhostnetworkerror.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elbruno/semantickernel-localLLMs/7a7c96e4f53b8ec1db2544c2e610eed1b683fc69/images/35Localhostnetworkerror.png -------------------------------------------------------------------------------- /images/40chatfulldemo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elbruno/semantickernel-localLLMs/7a7c96e4f53b8ec1db2544c2e610eed1b683fc69/images/40chatfulldemo.gif -------------------------------------------------------------------------------- /images/SK-LM-Phi2-demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elbruno/semantickernel-localLLMs/7a7c96e4f53b8ec1db2544c2e610eed1b683fc69/images/SK-LM-Phi2-demo.gif -------------------------------------------------------------------------------- /src/sk-customLLM-customHttpClient/Program.cs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2024 2 | // Author : Bruno Capuano 3 | // Change Log : 4 | // - This class demonstrates the usage of a custom HTTP client and the integration of the OpenAI Chat Completion API with the Semantic Kernel library. It creates a custom HTTP client, initializes a kernel with the OpenAI Chat Completion API, and invokes a prompt to generate a response. It also demonstrates the initialization of a chat session and prints the response from the chat service. 5 | // 6 | // The MIT License (MIT) 7 | // 8 | // Permission is hereby granted, free of charge, to any person obtaining a copy 9 | // of this software and associated documentation files (the "Software"), to deal 10 | // in the Software without restriction, including without limitation the rights 11 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | // copies of the Software, and to permit persons to whom the Software is 13 | // furnished to do so, subject to the following conditions: 14 | // 15 | // The above copyright notice and this permission notice shall be included in 16 | // all copies or substantial portions of the Software. 17 | // 18 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 24 | // THE SOFTWARE. 25 | 26 | using sk_customllm; 27 | using Microsoft.SemanticKernel; 28 | using Microsoft.SemanticKernel.ChatCompletion; 29 | 30 | // create custom http client 31 | var customHttpMessageHandler = new CustomHttpMessageHandler(); 32 | customHttpMessageHandler.CustomLlmUrl = "http://localhost:11434"; 33 | HttpClient client = new HttpClient(customHttpMessageHandler); 34 | 35 | // create kernel 36 | var builder = Kernel.CreateBuilder(); 37 | builder.AddOpenAIChatCompletion("llama2", "api-key", httpClient: client); 38 | var kernel = builder.Build(); 39 | 40 | // invoke a simple prompt to the chat service 41 | string prompt = "Write a joke about kittens"; 42 | var response = await kernel.InvokePromptAsync(prompt); 43 | Console.WriteLine(response.GetValue()); 44 | 45 | // init chat 46 | var chat = kernel.GetRequiredService(); 47 | var history = new ChatHistory(); 48 | history.AddSystemMessage("You are a useful assistant that replies using a funny style and emojis. Your name is Goku."); 49 | history.AddUserMessage("hi, who are you?"); 50 | 51 | // print response 52 | var result = await chat.GetChatMessageContentsAsync(history); 53 | Console.WriteLine(result[^1].Content); -------------------------------------------------------------------------------- /src/sk-customLLM-customHttpClient/sk-customLLM-customHttpClient.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | Exe 5 | net8.0 6 | sk_customLLM_customHttpClient 7 | enable 8 | enable 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /src/sk-customllm.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 17 4 | VisualStudioVersion = 17.0.31903.59 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "sk-phi2-localserver-lmstudio", "sk-phi2-localserver-lmstudio\sk-phi2-localserver-lmstudio.csproj", "{B03C7CC5-9BE6-4D72-A7C0-A6375A3B09F6}" 7 | EndProject 8 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "sk-ollama-localserver-ubuntu", "sk-ollama-localserver-ubuntu\sk-ollama-localserver-ubuntu.csproj", "{80253A50-6AEA-428C-A936-30FB6AC24D7D}" 9 | EndProject 10 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "sk-customllm", "sk-customllm\sk-customllm.csproj", "{29F02940-CAA3-4DF1-9DA2-7EA71A443AE8}" 11 | EndProject 12 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "sk-ollama-localserver-rpi", "sk-ollama-localserver-rpi\sk-ollama-localserver-rpi.csproj", "{7757F5B5-4980-4423-BF94-3994F03F2C9B}" 13 | EndProject 14 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "sk-ollamacsharp", "sk-ollamacsharp\sk-ollamacsharp.csproj", "{647242A7-82DE-47C3-8004-892D4A0EBF16}" 15 | EndProject 16 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "sk-customLLM-customHttpClient", "sk-customLLM-customHttpClient\sk-customLLM-customHttpClient.csproj", "{8736397C-11D2-4486-8DDB-41138154305E}" 17 | EndProject 18 | Global 19 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 20 | Debug|Any CPU = Debug|Any CPU 21 | Release|Any CPU = Release|Any CPU 22 | EndGlobalSection 23 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 24 | {B03C7CC5-9BE6-4D72-A7C0-A6375A3B09F6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 25 | {B03C7CC5-9BE6-4D72-A7C0-A6375A3B09F6}.Debug|Any CPU.Build.0 = Debug|Any CPU 26 | {B03C7CC5-9BE6-4D72-A7C0-A6375A3B09F6}.Release|Any CPU.ActiveCfg = Release|Any CPU 27 | {B03C7CC5-9BE6-4D72-A7C0-A6375A3B09F6}.Release|Any CPU.Build.0 = Release|Any CPU 28 | {80253A50-6AEA-428C-A936-30FB6AC24D7D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 29 | {80253A50-6AEA-428C-A936-30FB6AC24D7D}.Debug|Any CPU.Build.0 = Debug|Any CPU 30 | {80253A50-6AEA-428C-A936-30FB6AC24D7D}.Release|Any CPU.ActiveCfg = Release|Any CPU 31 | {80253A50-6AEA-428C-A936-30FB6AC24D7D}.Release|Any CPU.Build.0 = Release|Any CPU 32 | {29F02940-CAA3-4DF1-9DA2-7EA71A443AE8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 33 | {29F02940-CAA3-4DF1-9DA2-7EA71A443AE8}.Debug|Any CPU.Build.0 = Debug|Any CPU 34 | {29F02940-CAA3-4DF1-9DA2-7EA71A443AE8}.Release|Any CPU.ActiveCfg = Release|Any CPU 35 | {29F02940-CAA3-4DF1-9DA2-7EA71A443AE8}.Release|Any CPU.Build.0 = Release|Any CPU 36 | {7757F5B5-4980-4423-BF94-3994F03F2C9B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 37 | {7757F5B5-4980-4423-BF94-3994F03F2C9B}.Debug|Any CPU.Build.0 = Debug|Any CPU 38 | {7757F5B5-4980-4423-BF94-3994F03F2C9B}.Release|Any CPU.ActiveCfg = Release|Any CPU 39 | {7757F5B5-4980-4423-BF94-3994F03F2C9B}.Release|Any CPU.Build.0 = Release|Any CPU 40 | {647242A7-82DE-47C3-8004-892D4A0EBF16}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 41 | {647242A7-82DE-47C3-8004-892D4A0EBF16}.Debug|Any CPU.Build.0 = Debug|Any CPU 42 | {647242A7-82DE-47C3-8004-892D4A0EBF16}.Release|Any CPU.ActiveCfg = Release|Any CPU 43 | {647242A7-82DE-47C3-8004-892D4A0EBF16}.Release|Any CPU.Build.0 = Release|Any CPU 44 | {8736397C-11D2-4486-8DDB-41138154305E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 45 | {8736397C-11D2-4486-8DDB-41138154305E}.Debug|Any CPU.Build.0 = Debug|Any CPU 46 | {8736397C-11D2-4486-8DDB-41138154305E}.Release|Any CPU.ActiveCfg = Release|Any CPU 47 | {8736397C-11D2-4486-8DDB-41138154305E}.Release|Any CPU.Build.0 = Release|Any CPU 48 | EndGlobalSection 49 | GlobalSection(SolutionProperties) = preSolution 50 | HideSolutionNode = FALSE 51 | EndGlobalSection 52 | GlobalSection(ExtensibilityGlobals) = postSolution 53 | SolutionGuid = {C9A0F90D-7758-4F0E-A3B4-084D94B49C60} 54 | EndGlobalSection 55 | EndGlobal 56 | -------------------------------------------------------------------------------- /src/sk-customllm/CustomChatCompletionService.cs: -------------------------------------------------------------------------------- 1 | using Microsoft.SemanticKernel; 2 | using Microsoft.SemanticKernel.ChatCompletion; 3 | using System.Net.Http.Headers; 4 | using System.Text.Json; 5 | using sk_customllm.Models; 6 | 7 | namespace sk_customllm 8 | { 9 | public class CustomChatCompletionService : IChatCompletionService 10 | { 11 | // public property for the model url endpoint 12 | public string ModelUrl { get; set; } 13 | public string ModelName { get; set; } 14 | 15 | public IReadOnlyDictionary Attributes => throw new NotImplementedException(); 16 | 17 | public async Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) 18 | { 19 | using (var httpClient = new HttpClient()) 20 | { 21 | using (var request = new HttpRequestMessage(new HttpMethod("POST"), ModelUrl)) 22 | { 23 | 24 | // iterate though chatHistory and generate a json document based on the Root class 25 | var root = new ChatRequest(); 26 | for (int i = 0; i < chatHistory.Count; i++) 27 | { 28 | var message = chatHistory[i]; 29 | var msg = new ChatMessage(); 30 | msg.role = message.Role.ToString().ToLower(); 31 | msg.content = message.Content; 32 | root.messages.Add(msg); 33 | } 34 | 35 | // validate if ModelName is not empty and add it to the root object 36 | if (!string.IsNullOrEmpty(ModelName)) 37 | { 38 | root.model = ModelName; 39 | } 40 | 41 | // generate the json string from the root object 42 | var jsonString = JsonSerializer.Serialize(root); 43 | request.Content = new StringContent(jsonString); 44 | request.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json"); 45 | var httpResponse = await httpClient.SendAsync(request); 46 | 47 | // get the response content 48 | var responseContent = await httpResponse.Content.ReadAsStringAsync(); 49 | 50 | // deserialize the response content into a ChatResponse object 51 | var chatResponse = JsonSerializer.Deserialize(responseContent); 52 | 53 | // add httpResponse content to chatHistory 54 | chatHistory.AddAssistantMessage(chatResponse.choices[0].message.content); 55 | } 56 | } 57 | 58 | return chatHistory; 59 | } 60 | 61 | public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) 62 | { 63 | throw new NotImplementedException(); 64 | } 65 | 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /src/sk-customllm/CustomHttpMessageHandler.cs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2024 2 | // Author : Bruno Capuano 3 | // Change Log : 4 | // - The CustomHttpMessageHandler class extends HttpClientHandler and overrides the SendAsync method to modify the request URI based on a specified CustomLlmUrl if the host matches certain conditions. 5 | // 6 | // The MIT License (MIT) 7 | // 8 | // Permission is hereby granted, free of charge, to any person obtaining a copy 9 | // of this software and associated documentation files (the "Software"), to deal 10 | // in the Software without restriction, including without limitation the rights 11 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | // copies of the Software, and to permit persons to whom the Software is 13 | // furnished to do so, subject to the following conditions: 14 | // 15 | // The above copyright notice and this permission notice shall be included in 16 | // all copies or substantial portions of the Software. 17 | // 18 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 24 | // THE SOFTWARE. 25 | 26 | namespace sk_customllm 27 | { 28 | public class CustomHttpMessageHandler : HttpClientHandler 29 | { 30 | public string CustomLlmUrl { get; set; } 31 | 32 | protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) 33 | { 34 | string[] urls = { "api.openai.com", "openai.azure.com" }; 35 | 36 | // validate if request.RequestUri is not null and request.RequestUri.Host is in urls 37 | if (request.RequestUri != null && urls.Contains(request.RequestUri.Host)) 38 | { 39 | // set request.RequestUri to a new Uri with the LLMUrl and request.RequestUri.PathAndQuery 40 | request.RequestUri = new Uri($"{CustomLlmUrl}{request.RequestUri.PathAndQuery}"); 41 | } 42 | 43 | return base.SendAsync(request, cancellationToken); 44 | } 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /src/sk-customllm/Models/ChatMessage.cs: -------------------------------------------------------------------------------- 1 | namespace sk_customllm.Models 2 | { 3 | public class ChatMessage 4 | { 5 | public string role { get; set; } 6 | public string content { get; set; } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /src/sk-customllm/Models/ChatRequest.cs: -------------------------------------------------------------------------------- 1 | namespace sk_customllm.Models 2 | { 3 | public class ChatRequest 4 | { 5 | public ChatRequest() 6 | { 7 | messages = new List(); 8 | temperature = 0.7f; 9 | max_tokens = 2500; 10 | stream = false; 11 | model = ""; 12 | } 13 | public string model { get; set; } 14 | public List messages { get; set; } 15 | public float temperature { get; set; } 16 | public int max_tokens { get; set; } 17 | public bool stream { get; set; } 18 | 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/sk-customllm/Models/ChatResponse.cs: -------------------------------------------------------------------------------- 1 | namespace sk_customllm.Models 2 | { 3 | // Root myDeserializedClass = JsonConvert.DeserializeObject(myJsonResponse); 4 | public class ChatResponseChoice 5 | { 6 | public int ChatResponseindex { get; set; } 7 | public ChatResponseMessage message { get; set; } 8 | public string finish_reason { get; set; } 9 | } 10 | 11 | public class ChatResponseMessage 12 | { 13 | public string role { get; set; } 14 | public string content { get; set; } 15 | } 16 | 17 | public class ChatResponse 18 | { 19 | public string id { get; set; } 20 | public string @object { get; set; } 21 | public int created { get; set; } 22 | public string model { get; set; } 23 | public List choices { get; set; } 24 | public ChatResponseUsage usage { get; set; } 25 | } 26 | 27 | public class ChatResponseUsage 28 | { 29 | public int prompt_tokens { get; set; } 30 | public int completion_tokens { get; set; } 31 | public int total_tokens { get; set; } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/sk-customllm/sk-customllm.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | net8.0 5 | sk_customllm 6 | enable 7 | enable 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /src/sk-ollama-localserver-rpi/Program.cs: -------------------------------------------------------------------------------- 1 | using sk_customllm; 2 | using Microsoft.SemanticKernel; 3 | using Microsoft.SemanticKernel.ChatCompletion; 4 | using Microsoft.Extensions.DependencyInjection; 5 | 6 | // create a new custom chat conpletion service using the url http://localhost:5000:1234/api/chat 7 | var ollamaChat = new CustomChatCompletionService(); 8 | ollamaChat.ModelUrl = "http://localhost:5000:1234/api/chat"; 9 | ollamaChat.ModelName = "llama2"; 10 | 11 | // create kernel 12 | var builder = Kernel.CreateBuilder(); 13 | builder.Services.AddKeyedSingleton("ollamaChat", ollamaChat); 14 | var kernel = builder.Build(); 15 | 16 | // init chat 17 | var chat = kernel.GetRequiredService(); 18 | var history = new ChatHistory(); 19 | history.AddSystemMessage("You are a useful assistant that replies using a funny style and emojis. Your name is Goku."); 20 | history.AddUserMessage("hi, who are you?"); 21 | 22 | // print response 23 | var result = await chat.GetChatMessageContentsAsync(history); 24 | Console.WriteLine(result[^1].Content); -------------------------------------------------------------------------------- /src/sk-ollama-localserver-rpi/sk-ollama-localserver-rpi.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | Exe 5 | net8.0 6 | sk_ollama_localserver_rpi 7 | enable 8 | enable 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /src/sk-ollama-localserver-ubuntu/Program.cs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2024 2 | // Author : Bruno Capuano 3 | // Change Log : 4 | // - Sample console application to use llama2 LLM running locally in Ubuntu with Semantic Kernel 5 | // 6 | // The MIT License (MIT) 7 | // 8 | // Permission is hereby granted, free of charge, to any person obtaining a copy 9 | // of this software and associated documentation files (the "Software"), to deal 10 | // in the Software without restriction, including without limitation the rights 11 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | // copies of the Software, and to permit persons to whom the Software is 13 | // furnished to do so, subject to the following conditions: 14 | // 15 | // The above copyright notice and this permission notice shall be included in 16 | // all copies or substantial portions of the Software. 17 | // 18 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 24 | // THE SOFTWARE. 25 | 26 | using Microsoft.Extensions.DependencyInjection; 27 | using Microsoft.SemanticKernel; 28 | using Microsoft.SemanticKernel.ChatCompletion; 29 | using Microsoft.SemanticKernel.TextGeneration; 30 | using sk_ollamacsharp; 31 | 32 | // llama2 in Ubuntu local in WSL 33 | var ollamaChat = new OllamaChatCompletionService(); 34 | ollamaChat.ModelUrl = "http://localhost:11434"; 35 | ollamaChat.ModelName = "llama2"; 36 | 37 | var ollamaText = new OllamaTextGenerationService(); 38 | ollamaText.ModelUrl = "http://localhost:11434"; 39 | ollamaText.ModelName = "llama2"; 40 | 41 | // semantic kernel builder 42 | var builder = Kernel.CreateBuilder(); 43 | builder.Services.AddKeyedSingleton("ollamaChat", ollamaChat); 44 | builder.Services.AddKeyedSingleton("ollamaText", ollamaText); 45 | var kernel = builder.Build(); 46 | 47 | // text generation 48 | Console.WriteLine("===================="); 49 | Console.WriteLine("TEXT GENERATION DEMO"); 50 | Console.WriteLine("===================="); 51 | var textGen = kernel.GetRequiredService(); 52 | var response = textGen.GetTextContentsAsync("The weather in January in Toronto is usually ").Result; 53 | Console.WriteLine(response[^1].Text); 54 | 55 | // chat 56 | Console.WriteLine("===================="); 57 | Console.WriteLine("CHAT COMPLETION DEMO"); 58 | Console.WriteLine("===================="); 59 | var chat = kernel.GetRequiredService(); 60 | var history = new ChatHistory(); 61 | history.AddSystemMessage("You are a useful assistant that replies using a funny style and emojis. Your name is Goku."); 62 | history.AddUserMessage("hi, who are you?"); 63 | 64 | // print response 65 | var result = await chat.GetChatMessageContentsAsync(history); 66 | Console.WriteLine(result[^1].Content); -------------------------------------------------------------------------------- /src/sk-ollama-localserver-ubuntu/sk-ollama-localserver-ubuntu.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | Exe 5 | net8.0 6 | sk_ollama_localserver_ubuntu 7 | enable 8 | enable 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /src/sk-ollamacsharp/OllamaChatCompletionService.cs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2024 2 | // Author : Bruno Capuano 3 | // Change Log : 4 | // - Sample Chat Completion Service for Ollama models 5 | // 6 | // The MIT License (MIT) 7 | // 8 | // Permission is hereby granted, free of charge, to any person obtaining a copy 9 | // of this software and associated documentation files (the "Software"), to deal 10 | // in the Software without restriction, including without limitation the rights 11 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | // copies of the Software, and to permit persons to whom the Software is 13 | // furnished to do so, subject to the following conditions: 14 | // 15 | // The above copyright notice and this permission notice shall be included in 16 | // all copies or substantial portions of the Software. 17 | // 18 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 24 | // THE SOFTWARE. 25 | 26 | using Microsoft.SemanticKernel; 27 | using Microsoft.SemanticKernel.ChatCompletion; 28 | using OllamaSharp; 29 | using OllamaSharp.Models.Chat; 30 | 31 | namespace sk_ollamacsharp 32 | { 33 | public class OllamaChatCompletionService : IChatCompletionService 34 | { 35 | // public property for the model url endpoint 36 | public string ModelUrl { get; set; } 37 | public string ModelName { get; set; } 38 | 39 | public IReadOnlyDictionary Attributes => throw new NotImplementedException(); 40 | 41 | public async Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) 42 | { 43 | var ollama = new OllamaApiClient(ModelUrl, ModelName); // (uri); 44 | 45 | var chat = new Chat(ollama, _ => { }); 46 | 47 | 48 | // iterate though chatHistory Messages 49 | foreach (var message in chatHistory) 50 | { 51 | if (message.Role == AuthorRole.System) 52 | { 53 | await chat.SendAs(ChatRole.System, message.Content); 54 | continue; 55 | } 56 | } 57 | 58 | var lastMessage = chatHistory.LastOrDefault(); 59 | 60 | string question = lastMessage.Content; 61 | var chatResponse = ""; 62 | var history = (await chat.Send(question, CancellationToken.None)).ToArray(); 63 | 64 | var last = history.Last(); 65 | chatResponse = last.Content; 66 | 67 | chatHistory.AddAssistantMessage(chatResponse); 68 | 69 | return chatHistory; 70 | } 71 | 72 | public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) 73 | { 74 | throw new NotImplementedException(); 75 | } 76 | 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/sk-ollamacsharp/OllamaTextGenerationService.cs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2024 2 | // Author : Bruno Capuano 3 | // Change Log : 4 | // - Sample Text Generation Service for Ollama models 5 | // 6 | // The MIT License (MIT) 7 | // 8 | // Permission is hereby granted, free of charge, to any person obtaining a copy 9 | // of this software and associated documentation files (the "Software"), to deal 10 | // in the Software without restriction, including without limitation the rights 11 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | // copies of the Software, and to permit persons to whom the Software is 13 | // furnished to do so, subject to the following conditions: 14 | // 15 | // The above copyright notice and this permission notice shall be included in 16 | // all copies or substantial portions of the Software. 17 | // 18 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 24 | // THE SOFTWARE. 25 | 26 | using Microsoft.SemanticKernel; 27 | using Microsoft.SemanticKernel.ChatCompletion; 28 | using OllamaSharp; 29 | using OllamaSharp.Models.Chat; 30 | 31 | namespace sk_ollamacsharp 32 | { 33 | public class OllamaTextGenerationService : Microsoft.SemanticKernel.TextGeneration.ITextGenerationService 34 | { 35 | // public property for the model url endpoint 36 | public string ModelUrl { get; set; } 37 | public string ModelName { get; set; } 38 | 39 | public IReadOnlyDictionary Attributes => throw new NotImplementedException(); 40 | 41 | public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) 42 | { 43 | throw new NotImplementedException(); 44 | } 45 | 46 | public async Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) 47 | { 48 | var ollama = new OllamaApiClient(ModelUrl, ModelName); 49 | 50 | var completionResponse = await ollama.GetCompletion(prompt, null, CancellationToken.None); 51 | 52 | TextContent stc = new TextContent(completionResponse.Response); 53 | return new List { stc }; 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /src/sk-ollamacsharp/sk-ollamacsharp.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | net8.0 5 | sk_ollamacsharp 6 | enable 7 | enable 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /src/sk-phi2-localserver-lmstudio/Program.cs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2024 2 | // Author : Bruno Capuano 3 | // Change Log : 4 | // - Sample console application to use Phi-2 in LM Studio with Semantic Kernel 5 | // 6 | // The MIT License (MIT) 7 | // 8 | // Permission is hereby granted, free of charge, to any person obtaining a copy 9 | // of this software and associated documentation files (the "Software"), to deal 10 | // in the Software without restriction, including without limitation the rights 11 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | // copies of the Software, and to permit persons to whom the Software is 13 | // furnished to do so, subject to the following conditions: 14 | // 15 | // The above copyright notice and this permission notice shall be included in 16 | // all copies or substantial portions of the Software. 17 | // 18 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 24 | // THE SOFTWARE. 25 | 26 | using Microsoft.Extensions.DependencyInjection; 27 | using Microsoft.SemanticKernel; 28 | using Microsoft.SemanticKernel.ChatCompletion; 29 | using sk_customllm; 30 | 31 | // Phi-2 in LM Studio 32 | var phi2 = new CustomChatCompletionService(); 33 | phi2.ModelUrl = "http://localhost:1234/v1/chat/completions"; 34 | 35 | // semantic kernel builder 36 | var builder = Kernel.CreateBuilder(); 37 | builder.Services.AddKeyedSingleton("phi2Chat", phi2); 38 | var kernel = builder.Build(); 39 | 40 | // init chat 41 | var chat = kernel.GetRequiredService(); 42 | var history = new ChatHistory(); 43 | history.AddSystemMessage("You are a useful assistant that replies using a funny style and emojis. Your name is Goku."); 44 | history.AddUserMessage("hi, who are you?"); 45 | 46 | // print response 47 | var result = await chat.GetChatMessageContentsAsync(history); 48 | Console.WriteLine(result[^1].Content); 49 | 50 | // ADVANCED CHAT DEMO 51 | // // init chat 52 | // var chat = kernel.GetRequiredService(); 53 | // var history = new ChatHistory(); 54 | // history.AddSystemMessage("You are a useful assistant that replies with short messages."); 55 | // Console.WriteLine("Hint: type your question or type 'exit' to leave the conversation"); 56 | 57 | // // chat loop 58 | // while (true) 59 | // { 60 | // Console.Write("You: "); 61 | // var input = Console.ReadLine(); 62 | // if (string.IsNullOrEmpty(input) || input.ToLower() == "exit") 63 | // break; 64 | // history.AddUserMessage(input); 65 | // history = (ChatHistory)await chat.GetChatMessageContentsAsync(history); 66 | // Console.WriteLine(history[^1].Content); 67 | // Console.WriteLine("---"); 68 | // } 69 | 70 | // Console.WriteLine("Goodbye!"); -------------------------------------------------------------------------------- /src/sk-phi2-localserver-lmstudio/sk-phi2-localserver-lmstudio.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | Exe 5 | net8.0 6 | enable 7 | enable 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | --------------------------------------------------------------------------------