├── .gitignore ├── .vscode ├── launch.json └── tasks.json ├── LICENSE ├── README.md └── src ├── .vscode ├── launch.json └── tasks.json ├── Configuration ├── AzureCognitiveServicesOptions.cs └── OpenAiServiceOptions.cs ├── ConsoleGPTService.cs ├── Program.cs ├── Skills ├── AzCognitiveServicesSpeechSkill.cs ├── ChatSkill.cs ├── ConsoleSkill.cs └── ISpeechSkill.cs ├── configuration.json └── console-gpt.csproj /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 3 | ## 4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 5 | 6 | # User-specific files 7 | *.rsuser 8 | *.suo 9 | *.user 10 | *.userosscache 11 | *.sln.docstates 12 | 13 | # User-specific files (MonoDevelop/Xamarin Studio) 14 | *.userprefs 15 | 16 | # Mono auto generated files 17 | mono_crash.* 18 | 19 | # Build results 20 | [Dd]ebug/ 21 | [Dd]ebugPublic/ 22 | [Rr]elease/ 23 | [Rr]eleases/ 24 | x64/ 25 | x86/ 26 | [Aa][Rr][Mm]/ 27 | [Aa][Rr][Mm]64/ 28 | bld/ 29 | [Bb]in/ 30 | [Oo]bj/ 31 | [Ll]og/ 32 | [Ll]ogs/ 33 | 34 | # Visual Studio 2015/2017 cache/options directory 35 | .vs/ 36 | # Uncomment if you have tasks that create the project's static files in wwwroot 37 | #wwwroot/ 38 | 39 | # Visual Studio 2017 auto generated files 40 | Generated\ Files/ 41 | 42 | # MSTest test Results 43 | [Tt]est[Rr]esult*/ 44 | [Bb]uild[Ll]og.* 45 | 46 | # NUnit 47 | *.VisualState.xml 48 | TestResult.xml 49 | nunit-*.xml 50 | 51 | # Build Results of an ATL Project 52 | [Dd]ebugPS/ 53 | [Rr]eleasePS/ 54 | dlldata.c 55 | 56 | # Benchmark Results 57 | BenchmarkDotNet.Artifacts/ 58 | 59 | # .NET Core 60 | project.lock.json 61 | project.fragment.lock.json 62 | artifacts/ 63 | 64 | # StyleCop 65 | StyleCopReport.xml 66 | 67 | # Files built by Visual Studio 68 | *_i.c 69 | *_p.c 70 | *_h.h 71 | *.ilk 72 | *.meta 73 | *.obj 74 | 
*.iobj 75 | *.pch 76 | *.pdb 77 | *.ipdb 78 | *.pgc 79 | *.pgd 80 | *.rsp 81 | *.sbr 82 | *.tlb 83 | *.tli 84 | *.tlh 85 | *.tmp 86 | *.tmp_proj 87 | *_wpftmp.csproj 88 | *.log 89 | *.vspscc 90 | *.vssscc 91 | .builds 92 | *.pidb 93 | *.svclog 94 | *.scc 95 | 96 | # Chutzpah Test files 97 | _Chutzpah* 98 | 99 | # Visual C++ cache files 100 | ipch/ 101 | *.aps 102 | *.ncb 103 | *.opendb 104 | *.opensdf 105 | *.sdf 106 | *.cachefile 107 | *.VC.db 108 | *.VC.VC.opendb 109 | 110 | # Visual Studio profiler 111 | *.psess 112 | *.vsp 113 | *.vspx 114 | *.sap 115 | 116 | # Visual Studio Trace Files 117 | *.e2e 118 | 119 | # TFS 2012 Local Workspace 120 | $tf/ 121 | 122 | # Guidance Automation Toolkit 123 | *.gpState 124 | 125 | # ReSharper is a .NET coding add-in 126 | _ReSharper*/ 127 | *.[Rr]e[Ss]harper 128 | *.DotSettings.user 129 | 130 | # TeamCity is a build add-in 131 | _TeamCity* 132 | 133 | # DotCover is a Code Coverage Tool 134 | *.dotCover 135 | 136 | # AxoCover is a Code Coverage Tool 137 | .axoCover/* 138 | !.axoCover/settings.json 139 | 140 | # Visual Studio code coverage results 141 | *.coverage 142 | *.coveragexml 143 | 144 | # NCrunch 145 | _NCrunch_* 146 | .*crunch*.local.xml 147 | nCrunchTemp_* 148 | 149 | # MightyMoose 150 | *.mm.* 151 | AutoTest.Net/ 152 | 153 | # Web workbench (sass) 154 | .sass-cache/ 155 | 156 | # Installshield output folder 157 | [Ee]xpress/ 158 | 159 | # DocProject is a documentation generator add-in 160 | DocProject/buildhelp/ 161 | DocProject/Help/*.HxT 162 | DocProject/Help/*.HxC 163 | DocProject/Help/*.hhc 164 | DocProject/Help/*.hhk 165 | DocProject/Help/*.hhp 166 | DocProject/Help/Html2 167 | DocProject/Help/html 168 | 169 | # Click-Once directory 170 | publish/ 171 | 172 | # Publish Web Output 173 | *.[Pp]ublish.xml 174 | *.azurePubxml 175 | # Note: Comment the next line if you want to checkin your web deploy settings, 176 | # but database connection strings (with potential passwords) will be unencrypted 177 | *.pubxml 178 | 
*.publishproj 179 | 180 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 181 | # checkin your Azure Web App publish settings, but sensitive information contained 182 | # in these scripts will be unencrypted 183 | PublishScripts/ 184 | 185 | # NuGet Packages 186 | *.nupkg 187 | # NuGet Symbol Packages 188 | *.snupkg 189 | # The packages folder can be ignored because of Package Restore 190 | **/[Pp]ackages/* 191 | # except build/, which is used as an MSBuild target. 192 | !**/[Pp]ackages/build/ 193 | # Uncomment if necessary however generally it will be regenerated when needed 194 | #!**/[Pp]ackages/repositories.config 195 | # NuGet v3's project.json files produces more ignorable files 196 | *.nuget.props 197 | *.nuget.targets 198 | 199 | # Microsoft Azure Build Output 200 | csx/ 201 | *.build.csdef 202 | 203 | # Microsoft Azure Emulator 204 | ecf/ 205 | rcf/ 206 | 207 | # Windows Store app package directories and files 208 | AppPackages/ 209 | BundleArtifacts/ 210 | Package.StoreAssociation.xml 211 | _pkginfo.txt 212 | *.appx 213 | *.appxbundle 214 | *.appxupload 215 | 216 | # Visual Studio cache files 217 | # files ending in .cache can be ignored 218 | *.[Cc]ache 219 | # but keep track of directories ending in .cache 220 | !?*.[Cc]ache/ 221 | 222 | # Others 223 | ClientBin/ 224 | ~$* 225 | *~ 226 | *.dbmdl 227 | *.dbproj.schemaview 228 | *.jfm 229 | *.pfx 230 | *.publishsettings 231 | orleans.codegen.cs 232 | 233 | # Including strong name files can present a security risk 234 | # (https://github.com/github/gitignore/pull/2483#issue-259490424) 235 | #*.snk 236 | 237 | # Since there are multiple workflows, uncomment next line to ignore bower_components 238 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 239 | #bower_components/ 240 | 241 | # RIA/Silverlight projects 242 | Generated_Code/ 243 | 244 | # Backup & report files from converting an old project file 245 | # to a newer Visual Studio version. 
Backup files are not needed, 246 | # because we have git ;-) 247 | _UpgradeReport_Files/ 248 | Backup*/ 249 | UpgradeLog*.XML 250 | UpgradeLog*.htm 251 | ServiceFabricBackup/ 252 | *.rptproj.bak 253 | 254 | # SQL Server files 255 | *.mdf 256 | *.ldf 257 | *.ndf 258 | 259 | # Business Intelligence projects 260 | *.rdl.data 261 | *.bim.layout 262 | *.bim_*.settings 263 | *.rptproj.rsuser 264 | *- [Bb]ackup.rdl 265 | *- [Bb]ackup ([0-9]).rdl 266 | *- [Bb]ackup ([0-9][0-9]).rdl 267 | 268 | # Microsoft Fakes 269 | FakesAssemblies/ 270 | 271 | # GhostDoc plugin setting file 272 | *.GhostDoc.xml 273 | 274 | # Node.js Tools for Visual Studio 275 | .ntvs_analysis.dat 276 | node_modules/ 277 | 278 | # Visual Studio 6 build log 279 | *.plg 280 | 281 | # Visual Studio 6 workspace options file 282 | *.opt 283 | 284 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 285 | *.vbw 286 | 287 | # Visual Studio LightSwitch build output 288 | **/*.HTMLClient/GeneratedArtifacts 289 | **/*.DesktopClient/GeneratedArtifacts 290 | **/*.DesktopClient/ModelManifest.xml 291 | **/*.Server/GeneratedArtifacts 292 | **/*.Server/ModelManifest.xml 293 | _Pvt_Extensions 294 | 295 | # Paket dependency manager 296 | .paket/paket.exe 297 | paket-files/ 298 | 299 | # FAKE - F# Make 300 | .fake/ 301 | 302 | # CodeRush personal settings 303 | .cr/personal 304 | 305 | # Python Tools for Visual Studio (PTVS) 306 | __pycache__/ 307 | *.pyc 308 | 309 | # Cake - Uncomment if you are using it 310 | # tools/** 311 | # !tools/packages.config 312 | 313 | # Tabs Studio 314 | *.tss 315 | 316 | # Telerik's JustMock configuration file 317 | *.jmconfig 318 | 319 | # BizTalk build output 320 | *.btp.cs 321 | *.btm.cs 322 | *.odx.cs 323 | *.xsd.cs 324 | 325 | # OpenCover UI analysis results 326 | OpenCover/ 327 | 328 | # Azure Stream Analytics local run output 329 | ASALocalRun/ 330 | 331 | # MSBuild Binary and Structured Log 332 | *.binlog 333 | 334 | # NVidia Nsight GPU debugger 
configuration file 335 | *.nvuser 336 | 337 | # MFractors (Xamarin productivity tool) working folder 338 | .mfractor/ 339 | 340 | # Local History for Visual Studio 341 | .localhistory/ 342 | 343 | # BeatPulse healthcheck temp database 344 | healthchecksdb 345 | 346 | # Backup folder for Package Reference Convert tool in Visual Studio 2017 347 | MigrationBackup/ 348 | 349 | # Ionide (cross platform F# VS Code tools) working folder 350 | .ionide/ 351 | .DS_Store 352 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | // Use IntelliSense to find out which attributes exist for C# debugging 6 | // Use hover for the description of the existing attributes 7 | // For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md 8 | "name": ".NET Core Launch (console)", 9 | "type": "coreclr", 10 | "request": "launch", 11 | "preLaunchTask": "build", 12 | // If you have changed target frameworks, make sure to update the program path. 
13 | "program": "${workspaceFolder}/src/bin/Debug/net7.0/console-gpt.dll", 14 | "args": [], 15 | "cwd": "${workspaceFolder}/src", 16 | // For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console 17 | "console": "internalConsole", 18 | "stopAtEntry": false 19 | }, 20 | { 21 | "name": ".NET Core Attach", 22 | "type": "coreclr", 23 | "request": "attach" 24 | } 25 | ] 26 | } -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "build", 6 | "command": "dotnet", 7 | "type": "process", 8 | "args": [ 9 | "build", 10 | "${workspaceFolder}/src/console-gpt.csproj", 11 | "/property:GenerateFullPaths=true", 12 | "/consoleloggerparameters:NoSummary" 13 | ], 14 | "problemMatcher": "$msCompile" 15 | }, 16 | { 17 | "label": "publish", 18 | "command": "dotnet", 19 | "type": "process", 20 | "args": [ 21 | "publish", 22 | "${workspaceFolder}/src/console-gpt.csproj", 23 | "/property:GenerateFullPaths=true", 24 | "/consoleloggerparameters:NoSummary" 25 | ], 26 | "problemMatcher": "$msCompile" 27 | }, 28 | { 29 | "label": "watch", 30 | "command": "dotnet", 31 | "type": "process", 32 | "args": [ 33 | "watch", 34 | "run", 35 | "--project", 36 | "${workspaceFolder}/src/console-gpt.csproj" 37 | ], 38 | "problemMatcher": "$msCompile" 39 | } 40 | ] 41 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Jim Bennett 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, 
distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ConsoleGPT 2 | 3 | ChatGPT for your console using Semantic Kernel! 4 | 5 | This is a demo application to show how to use Semantic Kernel in your dotnet applications to interact with Large Language Models like GPT-3.5 to build a chat tool. You can interact with the LLM using your console, or using speech. 6 | 7 | ## Get started 8 | 9 | * Clone this repo 10 | 11 | * Navigate to the `src` folder 12 | 13 | * Get your API keys: 14 | 15 | * You will need an OpenAI key. Head to [platform.openai.com/signup](https://platform.openai.com/signup) to sign up and get an API key. 16 | * If you want to use speech to interact with ConsoleGPT, you will need to create an [Azure Cognitive Services Speech resource](https://ms.portal.azure.com/#create/Microsoft.CognitiveServicesSpeechServices). 
17 | 18 | * Set your OpenAI API key as a user secret: 19 | 20 | ```bash 21 | dotnet user-secrets set "OpenAI:Key" "" 22 | ``` 23 | 24 | * If you want to use speech to interact with ConsoleGPT, set your Azure Cognitive services key and region: 25 | 26 | ```bash 27 | dotnet user-secrets set "AzureCognitiveServices:Key" "" 28 | dotnet user-secrets set "AzureCognitiveServices:Region" "" 29 | ``` 30 | 31 | * Run the code with `dotnet run` 32 | 33 | * Ask away! To end the chat say goodbye. 34 | 35 | ```output 36 | ➜ src git:(main) ✗ dotnet run 37 | info: Microsoft.Hosting.Lifetime[0] 38 | Application started. Press Ctrl+C to shut down. 39 | info: Microsoft.Hosting.Lifetime[0] 40 | Hosting environment: Production 41 | info: Microsoft.Hosting.Lifetime[0] 42 | Content root path: /Users/jimbennett/GitHub/console-gpt/src 43 | Hello. Ask me a question or say goodbye to exit. 44 | What is the most recent star wars movie? 45 | The most recent Star Wars movie is "Star Wars: The Rise of Skywalker," which was released in December 2019. It is the ninth and final installment in the Skywalker saga. 46 | Who is the main character? 47 | The main character in "Star Wars: The Rise of Skywalker" is Rey, a powerful Force user who is trying to find her place in the galaxy and confront her past. She is played by actress Daisy Ridley. 48 | Goodbye 49 | Goodbye! Don't hesitate to ask if you have any more questions. 50 | 51 | Chat history: 52 | 53 | System: You are a friendly, intelligent, and curious assistant who is good at conversation. Your name is Orko. 54 | User: What is the most recent star wars movie? 55 | Assistant: The most recent Star Wars movie is "Star Wars: The Rise of Skywalker," which was released in December 2019. It is the ninth and final installment in the Skywalker saga. 56 | User: Who is the main character? 
57 | Assistant: The main character in "Star Wars: The Rise of Skywalker" is Rey, a powerful Force user who is trying to find her place in the galaxy and confront her past. She is played by actress Daisy Ridley. 58 | User: Goodbye 59 | Assistant: Goodbye! Don't hesitate to ask if you have any more questions. 60 | info: Microsoft.Hosting.Lifetime[0] 61 | Application is shutting down... 62 | ``` 63 | 64 | ## How does this app work? 65 | 66 | This app uses [Semantic Kernel](https://github.com/microsoft/semantic-kernel), an open source .NET library from Microsoft that aims to simplify building apps that use LLMs. You can read more about Semantic Kernel in the following locations: 67 | 68 | * [The Semantic Kernel documentation on Microsoft Learn](https://learn.microsoft.com/semantic-kernel) 69 | * [The Semantic Kernel repo on GitHub](https://github.com/microsoft/semantic-kernel) 70 | * [The Semantic Kernel Discord](https://aka.ms/sk/discord) 71 | * [The Semantic Kernel blog](https://devblogs.microsoft.com/semantic-kernel/) 72 | 73 | Semantic Kernel provides services to interact with both text and chat focused LLMs either using OpenAI directly, or via the Azure Open AI service. 74 | 75 | The app runs as a hosted application using the Microsoft hosting extension library. 76 | 77 | ### Services 78 | 79 | The chat service has a chat history to implement a memory so that conversations can flow naturally, and earlier prompts and responses can be chained to build later prompts. For example, the prompt: 80 | 81 | ```output 82 | What is the most recent star wars movie? 83 | ``` 84 | 85 | Will give a response such as: 86 | 87 | ```output 88 | The most recent Star Wars movie is "Star Wars: The Rise of Skywalker," which was released in December 2019. It is the ninth and final installment in the Skywalker saga. 89 | ``` 90 | 91 | This prompt and response can be chained with the next prompt: 92 | 93 | ```output 94 | Who is the main character? 
95 | ``` 96 | 97 | to give a response that 'knows' that the main character question refers to the "Star Wars: The Rise of Skywalker" movie: 98 | 99 | ```output 100 | The main character in "Star Wars: The Rise of Skywalker" is Rey, a powerful Force user who is trying to find her place in the galaxy and confront her past. She is played by actress Daisy Ridley. 101 | ``` 102 | 103 | ### Skills 104 | 105 | Semantic Kernel uses *skills* that define named *functions*. These functions communicate using text - the basic building block of large language models. Functions can take a `string` as a parameter, return a `string` or both. You can then chain these functions to build a pipeline. 106 | 107 | In this app, the `ConsoleSkill` implements 3 semantic kernel functions: 108 | 109 | ```csharp 110 | [SKFunction("Get console input.")] 111 | [SKFunctionName("Listen")] 112 | public Task Listen(SKContext context); 113 | 114 | [SKFunction("Write a response to the console.")] 115 | [SKFunctionName("Respond")] 116 | public Task Respond(string message, SKContext context); 117 | 118 | [SKFunction("Did the user say goodbye.")] 119 | [SKFunctionName("IsGoodbye")] 120 | public Task IsGoodbye(SKContext context); 121 | ``` 122 | 123 | The `Listen` function takes input from the console, the `Respond` function writes a response to the console (and returns it), and the `IsGoodbye` function returns if the `Listen` function received "goodbye" as its input. 124 | 125 | The `ChatSkill` implements 2 functions: 126 | 127 | ```csharp 128 | [SKFunction("Send a prompt to the LLM.")] 129 | [SKFunctionName("Prompt")] 130 | public async Task Prompt(string prompt); 131 | 132 | [SKFunction("Log the history of the chat with the LLM.")] 133 | [SKFunctionName("LogChatHistory")] 134 | public Task LogChatHistory(); 135 | ``` 136 | 137 | The `Prompt` function sends the given prompt to the LLM and returns the response. This uses the `IChatCompletion` service from semantic kernel. 
This is a service that has methods to create and manage chats. You can create a chat with a system prompt that gives the LLM background information to use when crafting responses. For example, the default system prompt for this app is: 138 | 139 | _You are a friendly, intelligent, and curious assistant who is good at conversation. Your name is [Orko](https://en.wikipedia.org/wiki/Orko_(character))._ 140 | 141 | This prompt is set in the `configuration.json` file, so can be changed to suit your needs. This prompt sets up all chats, so if you were to ask: 142 | 143 | ```output 144 | What is your name? 145 | ``` 146 | 147 | You would get the response: 148 | 149 | ```output 150 | My name is Orko. How can I assist you today? 151 | ``` 152 | 153 | This service also manages chat history through an instance of `OpenAIChatHistory`. This tracks history of the chat, tagging messages as `System` for the system prompt, `User` for prompts from the user and `Assistant` for the responses. This chat history is passed with every prompt so that the LLM can use the chat history to guide the response. There is a limit to the size that the LLM can handle for the prompt, so only the size specified in the `MaxTokens` field in the `configuration.json` file is sent. You can read more about tokens and their size in the [OpenAI documentation](https://help.openai.com/articles/4936856-what-are-tokens-and-how-to-count-them). 154 | 155 | The `LogChatHistory` function logs the chat history, including all the system, user and assistant messages. This is called at the end of the session to show the user what was sent and received. For example: 156 | 157 | ```output 158 | System: You are a friendly, intelligent, and curious assistant who is good at conversation. Your name is Orko. 159 | User: What is the most recent star wars movie? 160 | Assistant: The most recent Star Wars movie is "Star Wars: The Rise of Skywalker," which was released in December 2019. 
It is the ninth and final installment in the Skywalker saga. 161 | User: Who is the main character? 162 | Assistant: The main character in "Star Wars: The Rise of Skywalker" is Rey, a powerful Force user who is trying to find her place in the galaxy and confront her past. She is played by actress Daisy Ridley. 163 | User: Goodbye 164 | Assistant: Goodbye! Don't hesitate to ask if you have any more questions. 165 | ``` 166 | 167 | Semantic Kernel also has some out of the box skills to do things like interact with HTTP APIs, work with files or manipulate text. 168 | 169 | The advantage of using skills is that you can easily swap out skills as long as they have the same name. For example, in this app there is an alternative to the `ConsoleSkill` that uses speech to text and text to speech to interact with the user. It has the same functions on it marked with the same attributes, so can be swapped in. 170 | 171 | ### Creating functions from prompts 172 | 173 | This app also has an example function (commented out to start with) that converts text to poetry. Functions do not need to be built in code, but can be created using a text prompt. The poetry function is created with the following code: 174 | 175 | ```csharp 176 | string poemPrompt = """ 177 | Take this "{{$INPUT}}" and convert it to a poem in iambic pentameter. 178 | """; 179 | 180 | _poemFunction = _semanticKernel.CreateSemanticFunction(poemPrompt, maxTokens: openAIOptions.Value.MaxTokens, 181 | temperature: openAIOptions.Value.Temperature, frequencyPenalty: openAIOptions.Value.FrequencyPenalty, 182 | presencePenalty: openAIOptions.Value.PresencePenalty, topP: openAIOptions.Value.TopP); 183 | ``` 184 | 185 | This code creates the function using the prompt _Take this "{{$INPUT}}" and convert it to a poem in iambic pentameter._. 
The function that is created with the call to `CreateSemanticFunction` is a function that takes a string as input, replaces the `{{$INPUT}}` field in the text with that string, sends it to the text completion service, and returns the result. This allows you to quickly create libraries of standard prompts in text that can be used in pipelines to process data. 186 | 187 | ### Pipelines 188 | 189 | Semantic Kernel functions take and return text, so you can chain them together into pipelines. For example, chaining the `Listen`, `Prompt` and `Respond` functions to create a chatbot. 190 | 191 | ```csharp 192 | await _semanticKernel.RunAsync(_speechSkill["Listen"], _chatSkill["Prompt"], _speechSkill["Respond"]); 193 | ``` 194 | 195 | This works as long as any function returns the right input for the next function in the pipeline. For example, the `Listen` function returns a `string`, which is the input for the `Prompt` function, which in turn returns a `string`, which is the input for the `Respond` function. 196 | 197 | If you wanted to add more functions to the pipeline you can, for example by inserting the `_poemFunction` mentioned above before the call to the `Respond` function to get the results in poetry. 198 | 199 | These pipelines can be defined in code, so can be constructed on the fly if needed. 200 | 201 | ## Customizing the app 202 | 203 | ### Changing the model 204 | 205 | By default this app uses the models specified in the `ChatModel` and `TextModel` fields of the `configuration.json` file. Change these fields to use a different OpenAI model. 206 | 207 | ### Speech 208 | 209 | By default this app runs on the command line and you type your questions, getting the response out on the command line. You can also enable speech mode to be able to ask your questions with your voice and receive a spoken answer. To do this: 210 | 211 | * Make sure you have the relevant cognitive service resource configured and the key and region set as described above. 
212 | 213 | * In `Program.cs` comment out the line that adds the `ConsoleSkill` singleton, and uncomment the line that adds the `AzCognitiveServicesSpeechSkill` singleton. 214 | 215 | ```csharp 216 | // services.AddSingleton(); 217 | services.AddSingleton(); 218 | ``` 219 | 220 | * Run the app 221 | 222 | The app will use your default microphone and speaker to interact with you. Say the word "goodbye" to end the conversation. 223 | 224 | ### Poetry 225 | 226 | This app also includes some example code to show how to create a semantic function using a prompt, in this case to convert the response to poetry. To enable this: 227 | 228 | * In the `ConsoleGPTService`, uncomment the `_poemFunction` field: 229 | 230 | ```csharp 231 | private readonly ISKFunction _poemFunction; 232 | ``` 233 | 234 | * Uncomment the `openAIOptions` constructor parameter: 235 | 236 | ```csharp 237 | IOptions openAIOptions, 238 | ``` 239 | 240 | * Uncomment where this function is created in the `ConsoleGPTService` constructor: 241 | 242 | ```csharp 243 | _semanticKernel.Config.AddOpenAITextCompletionService("text", openAIOptions.Value.TextModel, 244 | openAIOptions.Value.Key); 245 | 246 | string poemPrompt = """ 247 | Take this "{{$INPUT}}" and convert it to a poem in iambic pentameter. 248 | """; 249 | 250 | _poemFunction = _semanticKernel.CreateSemanticFunction(poemPrompt, maxTokens: openAIOptions.Value.MaxTokens, 251 | temperature: openAIOptions.Value.Temperature, frequencyPenalty: openAIOptions.Value.FrequencyPenalty, 252 | presencePenalty: openAIOptions.Value.PresencePenalty, topP: openAIOptions.Value.TopP); 253 | ``` 254 | 255 | * Uncomment the line in `ExecuteAsync` that appends the `_poemFunction` to the pipeline: 256 | 257 | ```csharp 258 | pipeline.Append(_poemFunction).Append(_speechSkill["Respond"]); 259 | ``` 260 | 261 | * Run the app. 
This works with both the console and speech output 262 | 263 | When the app runs it will output the standard response, then convert it to poetry and output it again. 264 | 265 | ```output 266 | Hello. Ask me a question or say goodbye to exit. 267 | What is the most recent star wars movie? 268 | The most recent Star Wars movie is "Star Wars: The Rise of Skywalker," which was released in December 2019. It is the ninth and final installment in the Skywalker saga. 269 | 270 | 271 | "The Force of Fate has brought us here, 272 | To see the Rise of Skywalker near, 273 | The ninth and final part of this tale, 274 | Released in December, 'twas no fail. 275 | Star Wars, the movie, we all know, 276 | The saga of the Skywalkers show, 277 | The Force will live on, we can see, 278 | In this movie, 'tis our destiny." 279 | ``` 280 | -------------------------------------------------------------------------------- /src/.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | // Use IntelliSense to find out which attributes exist for C# debugging 6 | // Use hover for the description of the existing attributes 7 | // For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md 8 | "name": ".NET Core Launch (console)", 9 | "type": "coreclr", 10 | "request": "launch", 11 | "preLaunchTask": "build", 12 | // If you have changed target frameworks, make sure to update the program path. 
13 | "program": "${workspaceFolder}/bin/Debug/net7.0/console-gpt.dll", 14 | "args": [], 15 | "cwd": "${workspaceFolder}", 16 | // For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console 17 | "console": "integratedTerminal", 18 | "stopAtEntry": false 19 | }, 20 | { 21 | "name": ".NET Core Attach", 22 | "type": "coreclr", 23 | "request": "attach" 24 | } 25 | ] 26 | } -------------------------------------------------------------------------------- /src/.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "build", 6 | "command": "dotnet", 7 | "type": "process", 8 | "args": [ 9 | "build", 10 | "${workspaceFolder}/console-gpt.csproj", 11 | "/property:GenerateFullPaths=true", 12 | "/consoleloggerparameters:NoSummary" 13 | ], 14 | "problemMatcher": "$msCompile" 15 | }, 16 | { 17 | "label": "publish", 18 | "command": "dotnet", 19 | "type": "process", 20 | "args": [ 21 | "publish", 22 | "${workspaceFolder}/console-gpt.csproj", 23 | "/property:GenerateFullPaths=true", 24 | "/consoleloggerparameters:NoSummary" 25 | ], 26 | "problemMatcher": "$msCompile" 27 | }, 28 | { 29 | "label": "watch", 30 | "command": "dotnet", 31 | "type": "process", 32 | "args": [ 33 | "watch", 34 | "run", 35 | "--project", 36 | "${workspaceFolder}/console-gpt.csproj" 37 | ], 38 | "problemMatcher": "$msCompile" 39 | } 40 | ] 41 | } -------------------------------------------------------------------------------- /src/Configuration/AzureCognitiveServicesOptions.cs: -------------------------------------------------------------------------------- 1 | #pragma warning disable 8618 2 | namespace ConsoleGPT 3 | { 4 | /// 5 | /// Configuration options class for interacting with Azure Cognitive Services. 6 | /// 7 | public class AzureCognitiveServicesOptions 8 | { 9 | /// 10 | /// Location/region (e.g. 
EastUS) 11 | /// 12 | public string Region { get; set; } 13 | 14 | /// 15 | /// Access Key 16 | /// 17 | public string Key { get; set; } 18 | 19 | /// 20 | /// Default language for speech recognition (speech-to-text). 21 | /// 22 | public string SpeechRecognitionLanguage { get; set; } 23 | 24 | /// 25 | /// Name of the voice to use for speaking (text-to-speech). 26 | /// 27 | /// 28 | /// https://learn.microsoft.com/en-us/azure/cognitive-services/speech-service/language-support?tabs=stt-tts#text-to-speech 29 | /// 30 | public string SpeechSynthesisVoiceName { get; set; } 31 | 32 | /// 33 | /// True to enable style cues when speaking. 34 | /// 35 | public bool EnableSpeechStyle { get; set; } 36 | 37 | /// 38 | /// Indicates the speaking rate of the text. 39 | /// https://learn.microsoft.com/en-us/azure/cognitive-services/speech-service/speech-synthesis-markup-voice#adjust-prosody 40 | /// 41 | public string Rate { get; set; } 42 | } 43 | } -------------------------------------------------------------------------------- /src/Configuration/OpenAiServiceOptions.cs: -------------------------------------------------------------------------------- 1 | #pragma warning disable CS8618 2 | 3 | namespace ConsoleGPT 4 | { 5 | /// 6 | /// Configuration options for interacting with OpenAI. 7 | /// 8 | public class OpenAiServiceOptions 9 | { 10 | /// 11 | /// API Key. 12 | /// 13 | public string Key { get; set; } 14 | 15 | /// 16 | /// Maximum number of tokens to use when calling OpenAI. 17 | /// 18 | public int MaxTokens { get; set; } 19 | 20 | /// 21 | /// Randomness controls (0.0 - 1.0). 22 | /// 23 | public float Temperature { get; set; } 24 | 25 | /// 26 | /// Diversity (0.0 - 1.0). 27 | /// 28 | public float TopP { get; set; } 29 | 30 | /// 31 | /// How much to penalize new tokens based on existing frequency in the text so far (0.0 - 2.0). 
32 | /// 33 | public float FrequencyPenalty { get; set; } 34 | 35 | /// 36 | /// How much to penalize new tokens based on whether they appear in the text so far (0.0 - 2.0). 37 | /// 38 | public float PresencePenalty { get; set; } 39 | 40 | /// 41 | /// Name of the chat model to use (e.g. text-davinci-002). 42 | /// 43 | public string ChatModel { get; set; } 44 | 45 | /// 46 | /// Name of the text model to use (e.g. text-davinci-002). 47 | /// 48 | public string TextModel { get; set; } 49 | 50 | /// 51 | /// Initial prompt for the conversation. 52 | /// 53 | public string SystemPrompt { get; set; } 54 | } 55 | } -------------------------------------------------------------------------------- /src/ConsoleGPTService.cs: -------------------------------------------------------------------------------- 1 | using ConsoleGPT.Skills; 2 | 3 | using Microsoft.Extensions.Hosting; 4 | using Microsoft.Extensions.Options; 5 | using Microsoft.SemanticKernel; 6 | using Microsoft.SemanticKernel.Orchestration; 7 | 8 | namespace ConsoleGPT 9 | { 10 | /// 11 | /// This is the main application service. 12 | /// This takes console input, then sends it to the configured AI service, and then prints the response. 13 | /// All conversation history is maintained in the chat history. 
14 | /// 15 | internal class ConsoleGPTService : IHostedService 16 | { 17 | private readonly IKernel _semanticKernel; 18 | private readonly IDictionary _speechSkill; 19 | private readonly IDictionary _chatSkill; 20 | private readonly IHostApplicationLifetime _lifeTime; 21 | 22 | // Uncomment this to create a function that converts text to a poem 23 | // private readonly ISKFunction _poemFunction; 24 | 25 | public ConsoleGPTService(IKernel semanticKernel, 26 | ISpeechSkill speechSkill, 27 | ChatSkill chatSkill, 28 | // IOptions openAIOptions, 29 | IHostApplicationLifetime lifeTime) 30 | { 31 | _semanticKernel = semanticKernel; 32 | _lifeTime = lifeTime; 33 | 34 | // Import the skills to load the semantic kernel functions 35 | _speechSkill = _semanticKernel.ImportSkill(speechSkill); 36 | _chatSkill = _semanticKernel.ImportSkill(chatSkill); 37 | 38 | // Uncomment this to create a function that converts text to a poem 39 | // _semanticKernel.Config.AddOpenAITextCompletionService("text", openAIOptions.Value.TextModel, 40 | // openAIOptions.Value.Key); 41 | 42 | // var poemPrompt = """ 43 | // Take this "{{$INPUT}}" and convert it to a poem in iambic pentameter. 44 | // """; 45 | 46 | // _poemFunction = _semanticKernel.CreateSemanticFunction(poemPrompt, maxTokens: openAIOptions.Value.MaxTokens, 47 | // temperature: openAIOptions.Value.Temperature, frequencyPenalty: openAIOptions.Value.FrequencyPenalty, 48 | // presencePenalty: openAIOptions.Value.PresencePenalty, topP: openAIOptions.Value.TopP); 49 | } 50 | 51 | /// 52 | /// Start the service. 53 | /// 54 | public Task StartAsync(CancellationToken cancellationToken) 55 | { 56 | Task.Run(() => ExecuteAsync(cancellationToken), cancellationToken); 57 | return Task.CompletedTask; 58 | } 59 | 60 | /// 61 | /// Stop a running service. 62 | /// 63 | public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; 64 | 65 | /// 66 | /// The main execution loop. 
This awaits input and responds to it using semantic kernel functions. 67 | /// 68 | private async Task ExecuteAsync(CancellationToken cancellationToken) 69 | { 70 | // Write to the console that the conversation is beginning 71 | await _semanticKernel.RunAsync("Hello. Ask me a question or say goodbye to exit.", _speechSkill["Respond"]); 72 | 73 | // Loop till we are cancelled 74 | while (!cancellationToken.IsCancellationRequested) 75 | { 76 | // Create our pipeline 77 | ISKFunction[] pipeline = {_speechSkill["Listen"], _chatSkill["Prompt"], _speechSkill["Respond"]}; 78 | 79 | // Uncomment the following line to include the poem function in the pipeline 80 | // pipeline = pipeline.Append(_poemFunction).Append(_speechSkill["Respond"]).ToArray(); 81 | 82 | // Run the pipeline 83 | await _semanticKernel.RunAsync(pipeline); 84 | 85 | // Did we say goodbye? If so, exit 86 | var goodbyeContext = await _semanticKernel.RunAsync(_speechSkill["IsGoodbye"]); 87 | var isGoodbye = bool.Parse(goodbyeContext.Result); 88 | 89 | // If the user says goodbye, end the chat 90 | if (isGoodbye) 91 | { 92 | // Log the history so we can see the prompts used 93 | await _semanticKernel.RunAsync(_chatSkill["LogChatHistory"]); 94 | 95 | // Stop the application 96 | _lifeTime.StopApplication(); 97 | break; 98 | } 99 | } 100 | } 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /src/Program.cs: -------------------------------------------------------------------------------- 1 | using System.Reflection; 2 | 3 | using ConsoleGPT; 4 | using ConsoleGPT.Skills; 5 | 6 | using Microsoft.Extensions.Configuration; 7 | using Microsoft.Extensions.DependencyInjection; 8 | using Microsoft.Extensions.Hosting; 9 | using Microsoft.SemanticKernel; 10 | 11 | // Create the host builder 12 | var builder = Host.CreateDefaultBuilder(args); 13 | 14 | // Load the configuration file and user secrets 15 | // 16 | // These need to be set either directly in the 
configuration.json file or in the user secrets. Details are in 17 | // the configuration.json file. 18 | #pragma warning disable CS8604 19 | var configurationFilePath = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "configuration.json"); 20 | #pragma warning restore CS8604 21 | builder.ConfigureAppConfiguration((builder) => builder 22 | .AddJsonFile(configurationFilePath) 23 | .AddEnvironmentVariables() 24 | .AddUserSecrets()); 25 | 26 | // Configure the services for the host 27 | builder.ConfigureServices((context, services) => 28 | { 29 | // Setup configuration options 30 | var configurationRoot = context.Configuration; 31 | services.Configure(configurationRoot.GetSection("AzureCognitiveServices")); 32 | services.Configure(configurationRoot.GetSection("OpenAI")); 33 | 34 | // Add Semantic Kernel 35 | services.AddSingleton(serviceProvider => Kernel.Builder.Build()); 36 | 37 | // Add Native Skills 38 | 39 | // Use one of these 2 lines for the use input or output. 40 | // Console Skill is for console interactions, AzCognitiveServicesSpeechSkill is to interact using a mic and speakers 41 | services.AddSingleton(); 42 | // services.AddSingleton(); 43 | 44 | services.AddSingleton(); 45 | 46 | // Add the primary hosted service to start the loop. 47 | services.AddHostedService(); 48 | }); 49 | 50 | // Build and run the host. This keeps the app running using the HostedService. 
51 | var host = builder.Build(); 52 | await host.RunAsync(); -------------------------------------------------------------------------------- /src/Skills/AzCognitiveServicesSpeechSkill.cs: -------------------------------------------------------------------------------- 1 | using System.Text.RegularExpressions; 2 | using Microsoft.CognitiveServices.Speech; 3 | using Microsoft.CognitiveServices.Speech.Audio; 4 | using Microsoft.Extensions.Logging; 5 | using Microsoft.Extensions.Options; 6 | using Microsoft.SemanticKernel.Orchestration; 7 | using Microsoft.SemanticKernel.SkillDefinition; 8 | 9 | namespace ConsoleGPT.Skills 10 | { 11 | /// 12 | /// A speech skill using Azure Cognitive Services speech recognition and synthesis. 13 | /// 14 | public class AzCognitiveServicesSpeechSkill : IDisposable, ISpeechSkill 15 | { 16 | private readonly ILogger _logger; 17 | private readonly AzureCognitiveServicesOptions _options; 18 | private readonly AudioConfig _audioConfig; 19 | private readonly SpeechRecognizer _speechRecognizer; 20 | private readonly SpeechSynthesizer _speechSynthesizer; 21 | private bool _isGoodbye = false; 22 | 23 | 24 | public AzCognitiveServicesSpeechSkill( 25 | IOptions options, 26 | ILogger logger) 27 | { 28 | _logger = logger; 29 | _options = options.Value; 30 | 31 | // Build an audio config from the default microphone - this needs to be configured correctly 32 | _audioConfig = AudioConfig.FromDefaultMicrophoneInput(); 33 | 34 | // Build a speech configuration from our settings 35 | var speechConfig = SpeechConfig.FromSubscription(_options.Key, _options.Region); 36 | speechConfig.SpeechRecognitionLanguage = _options.SpeechRecognitionLanguage; 37 | speechConfig.SetProperty(PropertyId.SpeechServiceResponse_PostProcessingOption, "TrueText"); 38 | speechConfig.SpeechSynthesisVoiceName = _options.SpeechSynthesisVoiceName; 39 | 40 | // Create the speech synthesizer and recognizer 41 | _speechRecognizer = new SpeechRecognizer(speechConfig, _audioConfig); 42 | 
_speechSynthesizer = new SpeechSynthesizer(speechConfig); 43 | } 44 | 45 | /// 46 | /// Listens to the microphone and performs speech-to-text, returning what the user said. 47 | /// 48 | [SKFunction("Listen to the microphone and perform speech-to-text.")] 49 | [SKFunctionName("Listen")] 50 | public async Task Listen(SKContext context) 51 | { 52 | _logger.LogInformation("Listening..."); 53 | 54 | // Listen till a natural break in the speech is detected 55 | var result = await _speechRecognizer.RecognizeOnceAsync(); 56 | 57 | // Check the result and see if we got text 58 | if (result.Reason == ResultReason.RecognizedSpeech) 59 | { 60 | // If we got speech, log it 61 | _logger.LogInformation($"Recognized: {result.Text}"); 62 | 63 | // Check if the user said goodbye - the application will use this after processing the speech 64 | // to terminate the app 65 | if (result.Text.ToLower().StartsWith("goodbye")) 66 | _isGoodbye = true; 67 | 68 | // Return the speech 69 | return result.Text; 70 | } 71 | 72 | // If we didn't get speech, return an empty string 73 | return string.Empty; 74 | } 75 | 76 | /// 77 | /// Speaks the given message using text-to-speech. 
This returns the message so it can be used 78 | /// in the next function in the pipeline 79 | /// 80 | [SKFunction("Speak the current context (text-to-speech).")] 81 | [SKFunctionName("Respond")] 82 | public async Task Respond(string message, SKContext context) 83 | { 84 | // Check if we have a message to speak 85 | if (!string.IsNullOrWhiteSpace(message)) 86 | { 87 | _logger.LogInformation($"Speaking: {message}"); 88 | 89 | // Build some SSML with the text to speak 90 | string ssml = GenerateSsml(message, _options.SpeechSynthesisVoiceName); 91 | 92 | _logger.LogDebug(ssml); 93 | 94 | // Speak the SSML 95 | await _speechSynthesizer.SpeakSsmlAsync(ssml); 96 | } 97 | 98 | // Return the message so the next function in the pipeline can use it 99 | return message; 100 | } 101 | 102 | /// 103 | /// Checks if the user said goodbye 104 | /// 105 | [SKFunction("Did the user say goodbye.")] 106 | [SKFunctionName("IsGoodbye")] 107 | public Task IsGoodbye(SKContext context) 108 | { 109 | return Task.FromResult(_isGoodbye ? "true" : "false"); 110 | } 111 | 112 | /// 113 | /// Generate speech synthesis markup language (SSML) from a message for the given voice. 
114 | /// 115 | private string GenerateSsml(string message, string voiceName) 116 | => "" + 117 | $"" + 118 | $"" + 119 | $"{message}" + 120 | "" + 121 | "" + 122 | ""; 123 | 124 | /// 125 | /// Dispose of the speech synthesizer and recognizer 126 | /// 127 | public void Dispose() 128 | { 129 | _speechRecognizer.Dispose(); 130 | _audioConfig.Dispose(); 131 | } 132 | } 133 | } -------------------------------------------------------------------------------- /src/Skills/ChatSkill.cs: -------------------------------------------------------------------------------- 1 | using Microsoft.Extensions.Options; 2 | using Microsoft.SemanticKernel; 3 | using Microsoft.SemanticKernel.AI; 4 | using Microsoft.SemanticKernel.AI.ChatCompletion; 5 | using Microsoft.SemanticKernel.Connectors.OpenAI.ChatCompletion; 6 | using Microsoft.SemanticKernel.SkillDefinition; 7 | 8 | namespace ConsoleGPT.Skills 9 | { 10 | /// 11 | /// A Sematic Kernel skill that interacts with ChatGPT 12 | /// 13 | public class ChatSkill 14 | { 15 | private readonly IChatCompletion _chatCompletion; 16 | private readonly OpenAIChatHistory _chatHistory; 17 | private readonly ChatRequestSettings _chatRequestSettings; 18 | 19 | public ChatSkill(IKernel semanticKernel, 20 | IOptions openAIOptions) 21 | { 22 | // Set up the chat request settings 23 | _chatRequestSettings = new ChatRequestSettings() 24 | { 25 | MaxTokens = openAIOptions.Value.MaxTokens, 26 | Temperature = openAIOptions.Value.Temperature, 27 | FrequencyPenalty = openAIOptions.Value.FrequencyPenalty, 28 | PresencePenalty = openAIOptions.Value.PresencePenalty, 29 | TopP = openAIOptions.Value.TopP 30 | }; 31 | 32 | // Configure the semantic kernel 33 | semanticKernel.Config.AddOpenAIChatCompletionService("chat", openAIOptions.Value.ChatModel, 34 | openAIOptions.Value.Key); 35 | 36 | // Set up the chat completion and history - the history is used to keep track of the conversation 37 | // and is part of the prompt sent to ChatGPT to allow a continuous 
conversation 38 | _chatCompletion = semanticKernel.GetService(); 39 | _chatHistory = (OpenAIChatHistory)_chatCompletion.CreateNewChat(openAIOptions.Value.SystemPrompt); 40 | } 41 | 42 | /// 43 | /// Send a prompt to the LLM. 44 | /// 45 | [SKFunction("Send a prompt to the LLM.")] 46 | [SKFunctionName("Prompt")] 47 | public async Task Prompt(string prompt) 48 | { 49 | var reply = string.Empty; 50 | try 51 | { 52 | // Add the question as a user message to the chat history, then send everything to OpenAI. 53 | // The chat history is used as context for the prompt 54 | _chatHistory.AddUserMessage(prompt); 55 | reply = await _chatCompletion.GenerateMessageAsync(_chatHistory, _chatRequestSettings); 56 | 57 | // Add the interaction to the chat history. 58 | _chatHistory.AddAssistantMessage(reply); 59 | } 60 | catch (AIException aiex) 61 | { 62 | // Reply with the error message if there is one 63 | reply = $"OpenAI returned an error ({aiex.Message}). Please try again."; 64 | } 65 | 66 | return reply; 67 | } 68 | 69 | /// 70 | /// Log the history of the chat with the LLM. 71 | /// This will log the system prompt that configures the chat, along with the user and assistant messages. 
72 | /// 73 | [SKFunction("Log the history of the chat with the LLM.")] 74 | [SKFunctionName("LogChatHistory")] 75 | public Task LogChatHistory() 76 | { 77 | Console.WriteLine(); 78 | Console.WriteLine("Chat history:"); 79 | Console.WriteLine(); 80 | 81 | // Log the chat history including system, user and assistant (AI) messages 82 | foreach (var message in _chatHistory.Messages) 83 | { 84 | // Depending on the role, use a different color 85 | var role = message.AuthorRole; 86 | switch (role) 87 | { 88 | case "system": 89 | role = "System: "; 90 | Console.ForegroundColor = ConsoleColor.Blue; 91 | break; 92 | case "user": 93 | role = "User: "; 94 | Console.ForegroundColor = ConsoleColor.Yellow; 95 | break; 96 | case "assistant": 97 | role = "Assistant: "; 98 | Console.ForegroundColor = ConsoleColor.Green; 99 | break; 100 | } 101 | 102 | // Write the role and the message 103 | Console.WriteLine($"{role}{message.Content}"); 104 | } 105 | 106 | return Task.CompletedTask; 107 | } 108 | } 109 | } -------------------------------------------------------------------------------- /src/Skills/ConsoleSkill.cs: -------------------------------------------------------------------------------- 1 | using Microsoft.SemanticKernel.Orchestration; 2 | using Microsoft.SemanticKernel.SkillDefinition; 3 | 4 | namespace ConsoleGPT.Skills 5 | { 6 | /// 7 | /// A Sematic Kernel skill that provides the ability to read and write from the console 8 | /// 9 | public class ConsoleSkill: ISpeechSkill 10 | { 11 | private bool _isGoodbye = false; 12 | 13 | /// 14 | /// Gets input from the console 15 | /// 16 | [SKFunction("Get console input.")] 17 | [SKFunctionName("Listen")] 18 | public Task Listen(SKContext context) 19 | { 20 | return Task.Run(() => { 21 | var line = ""; 22 | 23 | while (string.IsNullOrWhiteSpace(line)) 24 | { 25 | line = Console.ReadLine(); 26 | } 27 | 28 | if (line.ToLower().StartsWith("goodbye")) 29 | _isGoodbye = true; 30 | 31 | return line; 32 | }); 33 | } 34 | 35 | /// 36 | 
/// Writes output to the console 37 | /// 38 | [SKFunction("Write a response to the console.")] 39 | [SKFunctionName("Respond")] 40 | public Task Respond(string message, SKContext context) 41 | { 42 | return Task.Run(() => { 43 | WriteAIResponse(message); 44 | return message; 45 | }); 46 | } 47 | 48 | /// 49 | /// Checks if the user said goodbye 50 | /// 51 | [SKFunction("Did the user say goodbye.")] 52 | [SKFunctionName("IsGoodbye")] 53 | public Task IsGoodbye(SKContext context) 54 | { 55 | return Task.FromResult(_isGoodbye ? "true" : "false"); 56 | } 57 | 58 | /// 59 | /// Write a response to the console in green. 60 | /// 61 | private void WriteAIResponse(string response) 62 | { 63 | // Write the response in Green, then revert the console color 64 | var oldColor = Console.ForegroundColor; 65 | Console.ForegroundColor = ConsoleColor.Green; 66 | Console.WriteLine(response); 67 | Console.ForegroundColor = oldColor; 68 | } 69 | } 70 | } -------------------------------------------------------------------------------- /src/Skills/ISpeechSkill.cs: -------------------------------------------------------------------------------- 1 | using Microsoft.SemanticKernel.Orchestration; 2 | 3 | namespace ConsoleGPT.Skills 4 | { 5 | /// 6 | /// An interface for a Sematic Kernel skill that provides the ability to read and write from the console 7 | /// 8 | public interface ISpeechSkill 9 | { 10 | /// 11 | /// Gets input from the user 12 | /// 13 | public Task Listen(SKContext context); 14 | 15 | /// 16 | /// Responds tp the user 17 | /// 18 | public Task Respond(string message, SKContext context); 19 | 20 | 21 | /// 22 | /// Gets if Listen function detected goodbye from the user 23 | /// 24 | public Task IsGoodbye(SKContext context); 25 | } 26 | } -------------------------------------------------------------------------------- /src/configuration.json: -------------------------------------------------------------------------------- 1 | { 2 | "AzureCognitiveServices": { 3 | "Key": 
"", // dotnet user-secrets set "AzureCognitiveServices:Key" "****" 4 | "Region": "", // dotnet user-secrets set "AzureCognitiveServices:Region" "****" 5 | "SpeechRecognitionLanguage": "en-US", 6 | "SpeechSynthesisVoiceName": "en-US-JennyNeural", // For a list of available voices, see https://aka.ms/maker/friendbot/voicenames 7 | "EnableSpeechStyle": "false", // TODO: not supported yet after the migration to ChatGPT. 8 | "Rate": "+15%", // https://learn.microsoft.com/en-us/azure/cognitive-services/speech-service/speech-synthesis-markup-voice#adjust-prosody 9 | }, 10 | "OpenAI": { 11 | "Key": "", // dotnet user-secrets set "OpenAI:Key" "****" 12 | "MaxTokens": 1500, 13 | "Temperature": 0.7, // 0.0 - 1.0 14 | "FrequencyPenalty": 0, // 0.0 - 2.0 15 | "PresencePenalty": 0, // 0.0 - 2.0 16 | "ChatModel": "gpt-3.5-turbo", // The model for chat, needs to be a gpt model, see https://platform.openai.com/docs/models/overview 17 | "TextModel": "text-davinci-003", // The model for text completion, see https://platform.openai.com/docs/models/overview for a list of models 18 | "SystemPrompt": "You are a friendly, intelligent, and curious assistant who is good at conversation. Your name is Orko." 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/console-gpt.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | net7.0 6 | console_gpt 7 | enable 8 | enable 9 | 101d672c-bd5f-41ea-8f24-58b5cac0bc6d 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | PreserveNewest 23 | 24 | 25 | 26 | 27 | --------------------------------------------------------------------------------