├── +llms ├── +azure │ ├── apiVersions.m │ └── validateResponseFormat.m ├── +internal │ ├── callAzureChatAPI.m │ ├── callOllamaChatAPI.m │ ├── callOpenAIChatAPI.m │ ├── getApiKeyFromNvpOrEnv.m │ ├── gptPenalties.m │ ├── hasTools.m │ ├── jsonSchemaFromPrototype.m │ ├── needsAPIKey.m │ ├── reformatOutput.m │ ├── sendRequest.m │ ├── sendRequestWrapper.m │ ├── textGenerator.m │ ├── useSameFieldTypes.m │ └── verbatimJSON.m ├── +openai │ ├── models.m │ ├── validateMessageSupported.m │ └── validateResponseFormat.m ├── +stream │ └── responseStreamer.m ├── +utils │ ├── errorMessageCatalog.m │ ├── isUnique.m │ ├── mustBeNonzeroLengthTextScalar.m │ ├── mustBeResponseFormat.m │ ├── mustBeTextOrEmpty.m │ ├── mustBeValidPenalty.m │ ├── mustBeValidProbability.m │ ├── mustBeValidStop.m │ ├── mustBeValidTemperature.m │ └── requestsStructuredOutput.m └── jsonSchemaFromPrototype.m ├── .gitattributes ├── .githooks └── pre-commit ├── .github ├── CODEOWNERS └── workflows │ └── ci.yml ├── .gitignore ├── DEVELOPMENT.md ├── README.md ├── SECURITY.md ├── azureChat.m ├── doc ├── Azure.md ├── Ollama.md ├── OpenAI.md └── functions │ ├── addParameter.md │ ├── addResponseMessage.md │ ├── addSystemMessage.md │ ├── addToolMessage.md │ ├── addUserMessage.md │ ├── addUserMessageWithImages.md │ ├── azureChat.md │ ├── createVariation.md │ ├── edit.md │ ├── generate.md │ ├── images │ ├── addUserMessageWithImages1.png │ ├── azureEnvExample.png │ ├── boardwalk.png │ ├── createVariation1.png │ ├── createVariation2.png │ ├── edit1.png │ ├── edit2.png │ ├── envExample.png │ ├── octopus.png │ ├── openAIFunction1.png │ ├── openAIFunction2.png │ ├── openAIImages.generate1.png │ └── openAIImages1.png │ ├── messageHistory.md │ ├── ollamaChat.md │ ├── openAIChat.md │ ├── openAIFunction.md │ ├── openAIImages.generate.md │ ├── openAIImages.md │ └── removeMessage.md ├── examples ├── AnalyzeScientificPapersUsingFunctionCalls.md ├── AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.md ├── 
AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.md ├── AnalyzeTextDataUsingParallelFunctionCallwithOllama.md ├── CreateSimpleChatBot.md ├── CreateSimpleOllamaChatBot.md ├── DescribeImagesUsingChatGPT.md ├── DescribeImagesUsingChatGPT_media │ └── figure_0.png ├── InformationRetrievalUsingOpenAIDocumentEmbedding.md ├── ProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode.md ├── ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.md ├── RetrievalAugmentedGenerationUsingChatGPTandMATLAB.md ├── RetrievalAugmentedGenerationUsingOllamaAndMATLAB.md ├── SummarizeLargeDocumentsUsingChatGPTandMATLAB.md ├── UsingDALLEToEditImages.md ├── UsingDALLEToEditImages_media │ ├── figure_0.png │ ├── figure_1.png │ ├── figure_2.png │ └── figure_3.png ├── UsingDALLEToGenerateImages.md ├── UsingDALLEToGenerateImages_media │ ├── figure_0.png │ └── figure_1.png ├── images │ ├── bear.png │ └── mask_bear.png └── mlx-scripts │ ├── AnalyzeScientificPapersUsingFunctionCalls.mlx │ ├── AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.mlx │ ├── AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.mlx │ ├── AnalyzeTextDataUsingParallelFunctionCallwithOllama.mlx │ ├── CreateSimpleChatBot.mlx │ ├── CreateSimpleOllamaChatBot.mlx │ ├── DescribeImagesUsingChatGPT.mlx │ ├── InformationRetrievalUsingOpenAIDocumentEmbedding.mlx │ ├── ProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode.mlx │ ├── ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mlx │ ├── RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mlx │ ├── RetrievalAugmentedGenerationUsingOllamaAndMATLAB.mlx │ ├── SummarizeLargeDocumentsUsingChatGPTandMATLAB.mlx │ ├── UsingDALLEToEditImages.mlx │ └── UsingDALLEToGenerateImages.mlx ├── extractOpenAIEmbeddings.m ├── functionSignatures.json ├── license.txt ├── messageHistory.m ├── ollamaChat.m ├── openAIChat.m ├── openAIFunction.m ├── openAIImages.m ├── openAIMessages.m └── tests ├── hopenAIChat.m ├── hstructuredOutput.m ├── htoolCalls.m ├── private ├── 
recording-doubles │ ├── +llms │ │ └── +internal │ │ │ └── sendRequestWrapper.m │ └── addpath.m └── replaying-doubles │ ├── +llms │ └── +internal │ │ └── sendRequestWrapper.m │ └── addpath.m ├── recordings ├── AnalyzeScientificPapersUsingFunctionCalls.mat ├── AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.mat ├── AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.mat ├── AnalyzeTextDataUsingParallelFunctionCallwithOllama.mat ├── CreateSimpleChatBot.mat ├── CreateSimpleOllamaChatBot.mat ├── DescribeImagesUsingChatGPT.mat ├── InformationRetrievalUsingOpenAIDocumentEmbedding.mat ├── ProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode.mat ├── ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mat ├── README.md ├── RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mat ├── RetrievalAugmentedGenerationUsingOllamaAndMATLAB.mat ├── SummarizeLargeDocumentsUsingChatGPTandMATLAB.mat ├── UsingDALLEToEditImages.mat └── UsingDALLEToGenerateImages.mat ├── tazureChat.m ├── terrorMessageCatalog.m ├── test_files └── solar.png ├── texampleTests.m ├── textractOpenAIEmbeddings.m ├── tjsonSchemaFromPrototype.m ├── tmessageHistory.m ├── tollamaChat.m ├── topenAIChat.m ├── topenAIFunction.m ├── topenAIImages.m ├── topenAIMessages.m ├── tresponseStreamer.m └── tuseSameFieldTypes.m /+llms/+azure/apiVersions.m: -------------------------------------------------------------------------------- 1 | function versions = apiVersions 2 | %VERSIONS - supported azure API versions 3 | 4 | % Copyright 2024 The MathWorks, Inc. 5 | versions = [... 6 | "2025-02-01-preview", ... 7 | "2025-01-01-preview", ... 8 | "2024-12-01-preview", ... 9 | "2024-10-01-preview", ... 10 | "2024-09-01-preview", ... 11 | "2024-08-01-preview", ... 12 | "2024-07-01-preview", ... 13 | "2024-05-01-preview", ... 14 | "2024-04-01-preview", ... 15 | "2024-03-01-preview", ... 16 | "2024-10-21", ... 17 | "2024-06-01", ... 18 | "2024-02-01", ... 19 | "2023-05-15", ... 
20 | ]; 21 | end 22 | -------------------------------------------------------------------------------- /+llms/+azure/validateResponseFormat.m: -------------------------------------------------------------------------------- 1 | function validateResponseFormat(format,model,messages) 2 | %validateResponseFormat - validate requested response format is available for selected API Version 3 | 4 | % Copyright 2024 The MathWorks, Inc. 5 | 6 | if ischar(format) | iscellstr(format) %#ok 7 | format = string(format); 8 | end 9 | 10 | if isstring(format) && isequal(lower(format),"json") 11 | if nargin > 2 12 | % OpenAI requires that the prompt or message describing the format must contain the word `"json"` or `"JSON"`. 13 | if ~any(cellfun(@(s) contains(s.content,"json","IgnoreCase",true), messages)) 14 | error("llms:warningJsonInstruction", ... 15 | llms.utils.errorMessageCatalog.getMessage("llms:warningJsonInstruction")) 16 | end 17 | end 18 | end 19 | 20 | if requestsStructuredOutput(format) 21 | % the beauty of ISO-8601: comparing dates by string comparison 22 | if model.APIVersion < "2024-08-01" 23 | error("llms:structuredOutputRequiresAPI", ... 24 | llms.utils.errorMessageCatalog.getMessage("llms:structuredOutputRequiresAPI", model.APIVersion)); 25 | end 26 | end 27 | end 28 | 29 | function tf = requestsStructuredOutput(format) 30 | % If the response format is not "text" or "json", then the input is interpreted as structured output. 31 | tf = ~isequal(format, "text") & ~isequal(format, "json"); 32 | end 33 | -------------------------------------------------------------------------------- /+llms/+internal/callAzureChatAPI.m: -------------------------------------------------------------------------------- 1 | function [text, message, response] = callAzureChatAPI(endpoint, deploymentID, messages, functions, nvp) 2 | % This function is undocumented and will change in a future release 3 | 4 | %callAzureChatAPI Calls the openAI chat completions API on Azure. 
5 | % 6 | % MESSAGES and FUNCTIONS should be structs matching the json format 7 | % required by the OpenAI Chat Completions API. 8 | % Ref: https://platform.openai.com/docs/guides/gpt/chat-completions-api 9 | % 10 | % More details on the parameters: https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/chatgpt 11 | % 12 | % Example 13 | % 14 | % % Create messages struct 15 | % messages = {struct("role", "system",... 16 | % "content", "You are a helpful assistant"); 17 | % struct("role", "user", ... 18 | % "content", "What is the edit distance between hi and hello?")}; 19 | % 20 | % % Create functions struct 21 | % functions = {struct("name", "editDistance", ... 22 | % "description", "Find edit distance between two strings or documents.", ... 23 | % "parameters", struct( ... 24 | % "type", "object", ... 25 | % "properties", struct(... 26 | % "str1", struct(... 27 | % "description", "Source string.", ... 28 | % "type", "string"),... 29 | % "str2", struct(... 30 | % "description", "Target string.", ... 31 | % "type", "string")),... 32 | % "required", ["str1", "str2"]))}; 33 | % 34 | % % Define your API key 35 | % apiKey = "your-api-key-here" 36 | % 37 | % % Send a request 38 | % [text, message] = llms.internal.callAzureChatAPI(messages, functions, APIKey=apiKey) 39 | 40 | % Copyright 2023-2025 The MathWorks, Inc. 
41 | 42 | arguments 43 | endpoint 44 | deploymentID 45 | messages 46 | functions 47 | nvp.ToolChoice 48 | nvp.APIVersion 49 | nvp.Temperature 50 | nvp.TopP 51 | nvp.NumCompletions 52 | nvp.StopSequences 53 | nvp.MaxNumTokens 54 | nvp.PresencePenalty 55 | nvp.FrequencyPenalty 56 | nvp.ResponseFormat 57 | nvp.Seed 58 | nvp.APIKey 59 | nvp.TimeOut 60 | nvp.StreamFun 61 | end 62 | 63 | URL = endpoint + "openai/deployments/" + deploymentID + "/chat/completions?api-version=" + nvp.APIVersion; 64 | 65 | parameters = buildParametersCall(messages, functions, nvp); 66 | 67 | [response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun); 68 | 69 | % For old models like GPT-3.5, we may have to change the request sent a 70 | % little. Since we cannot detect the model used other than trying to send a 71 | % request, we have to analyze the response instead. 72 | if response.StatusCode=="BadRequest" && ... 73 | isfield(response.Body.Data,"error") && ... 74 | isfield(response.Body.Data.error,"message") && ... 75 | response.Body.Data.error.message == "Unrecognized request argument supplied: max_completion_tokens" 76 | parameters = renameStructField(parameters,'max_completion_tokens','max_tokens'); 77 | [response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun); 78 | end 79 | 80 | % If call errors, "choices" will not be part of response.Body.Data, instead 81 | % we get response.Body.Data.error 82 | if response.StatusCode=="OK" 83 | % Outputs the first generation 84 | if isempty(nvp.StreamFun) 85 | message = response.Body.Data.choices(1).message; 86 | else 87 | pat = '{"' + wildcardPattern + '":'; 88 | if contains(streamedText,pat) 89 | s = jsondecode(streamedText); 90 | if contains(s.function.arguments,pat) 91 | prompt = jsondecode(s.function.arguments); 92 | s.function.arguments = prompt; 93 | end 94 | message = struct("role", "assistant", ... 95 | "content",[], ... 
96 | "tool_calls",jsondecode(streamedText)); 97 | else 98 | message = struct("role", "assistant", ... 99 | "content", streamedText); 100 | end 101 | end 102 | if isfield(message, "tool_choice") 103 | text = ""; 104 | else 105 | text = string(message.content); 106 | end 107 | else 108 | text = ""; 109 | message = struct(); 110 | end 111 | end 112 | 113 | function parameters = buildParametersCall(messages, functions, nvp) 114 | % Builds a struct in the format that is expected by the API, combining 115 | % MESSAGES, FUNCTIONS and parameters in NVP. 116 | 117 | parameters = struct(); 118 | parameters.messages = messages; 119 | 120 | parameters.stream = ~isempty(nvp.StreamFun); 121 | 122 | if ~isempty(functions) 123 | parameters.tools = functions; 124 | end 125 | 126 | if ~isempty(nvp.ToolChoice) 127 | parameters.tool_choice = nvp.ToolChoice; 128 | end 129 | 130 | if strcmp(nvp.ResponseFormat,"json") 131 | parameters.response_format = struct('type','json_object'); 132 | elseif isstruct(nvp.ResponseFormat) 133 | parameters.response_format = struct('type','json_schema',... 134 | 'json_schema', struct('strict', true, 'name', 'computedFromPrototype', ... 135 | 'schema', llms.internal.jsonSchemaFromPrototype(nvp.ResponseFormat))); 136 | elseif startsWith(string(nvp.ResponseFormat), asManyOfPattern(whitespacePattern)+"{") 137 | parameters.response_format = struct('type','json_schema',... 138 | 'json_schema', struct('strict', true, 'name', 'providedInCall', ... 139 | 'schema', llms.internal.verbatimJSON(nvp.ResponseFormat))); 140 | end 141 | 142 | if ~isempty(nvp.Seed) 143 | parameters.seed = nvp.Seed; 144 | end 145 | 146 | dict = mapNVPToParameters; 147 | 148 | nvpOptions = keys(dict); 149 | for opt = nvpOptions.' 
150 | if isfield(nvp, opt) && ~isempty(nvp.(opt)) 151 | parameters.(dict(opt)) = nvp.(opt); 152 | end 153 | end 154 | 155 | if nvp.MaxNumTokens == Inf 156 | parameters = rmfield(parameters,dict("MaxNumTokens")); 157 | end 158 | 159 | end 160 | 161 | function dict = mapNVPToParameters() 162 | dict = dictionary(); 163 | dict("Temperature") = "temperature"; 164 | dict("TopP") = "top_p"; 165 | dict("NumCompletions") = "n"; 166 | dict("StopSequences") = "stop"; 167 | dict("MaxNumTokens") = "max_completion_tokens"; 168 | dict("PresencePenalty") = "presence_penalty"; 169 | dict("FrequencyPenalty") = "frequency_penalty"; 170 | end -------------------------------------------------------------------------------- /+llms/+internal/callOllamaChatAPI.m: -------------------------------------------------------------------------------- 1 | function [text, message, response] = callOllamaChatAPI(model, messages, functions, nvp) 2 | % This function is undocumented and will change in a future release 3 | 4 | %callOllamaChatAPI Calls the Ollama™ chat completions API. 5 | % 6 | % MESSAGES and FUNCTIONS should be structs matching the json format 7 | % required by the Ollama Chat Completions API. 8 | % Ref: https://github.com/ollama/ollama/blob/main/docs/api.md 9 | % 10 | % More details on the parameters: https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values 11 | % 12 | % Example 13 | % 14 | % model = "mistral"; 15 | % 16 | % % Create messages struct 17 | % messages = {struct("role", "system",... 18 | % "content", "You are a helpful assistant"); 19 | % struct("role", "user", ... 20 | % "content", "What is the edit distance between hi and hello?")}; 21 | % 22 | % % Send a request 23 | % [text, message] = llms.internal.callOllamaChatAPI(model, messages) 24 | 25 | % Copyright 2023-2025 The MathWorks, Inc. 
26 | 27 | arguments 28 | model 29 | messages 30 | functions 31 | nvp.ToolChoice 32 | nvp.Temperature 33 | nvp.TopP 34 | nvp.MinP 35 | nvp.TopK 36 | nvp.TailFreeSamplingZ 37 | nvp.StopSequences 38 | nvp.MaxNumTokens 39 | nvp.ResponseFormat 40 | nvp.Seed 41 | nvp.TimeOut 42 | nvp.StreamFun 43 | nvp.Endpoint 44 | end 45 | 46 | URL = nvp.Endpoint + "/api/chat"; 47 | if ~startsWith(URL,"http") 48 | URL = "http://" + URL; 49 | end 50 | 51 | % The JSON for StopSequences must have an array, and cannot say "stop": "foo". 52 | % The easiest way to ensure that is to never pass in a scalar … 53 | if isscalar(nvp.StopSequences) 54 | nvp.StopSequences = [nvp.StopSequences, nvp.StopSequences]; 55 | end 56 | 57 | parameters = buildParametersCall(model, messages, functions, nvp); 58 | 59 | [response, streamedText] = llms.internal.sendRequestWrapper(parameters,[],URL,nvp.TimeOut,nvp.StreamFun); 60 | 61 | % If call errors, "choices" will not be part of response.Body.Data, instead 62 | % we get response.Body.Data.error 63 | if response.StatusCode=="OK" 64 | % Outputs the first generation 65 | if isempty(nvp.StreamFun) 66 | if iscell(response.Body.Data) 67 | message = response.Body.Data{1}.message; 68 | else 69 | message = response.Body.Data.message; 70 | end 71 | else 72 | message = struct("role", "assistant", ... 73 | "content", streamedText); 74 | end 75 | text = string(message.content); 76 | else 77 | text = ""; 78 | message = struct(); 79 | end 80 | end 81 | 82 | function parameters = buildParametersCall(model, messages, functions, nvp) 83 | % Builds a struct in the format that is expected by the API, combining 84 | % MESSAGES, FUNCTIONS and parameters in NVP. 
85 | 86 | parameters = struct(); 87 | parameters.model = model; 88 | parameters.messages = messages; 89 | 90 | parameters.stream = ~isempty(nvp.StreamFun); 91 | 92 | if ~isempty(functions) 93 | parameters.tools = functions; 94 | end 95 | 96 | if ~isempty(nvp.ToolChoice) 97 | parameters.tool_choice = nvp.ToolChoice; 98 | end 99 | 100 | options = struct; 101 | 102 | if strcmp(nvp.ResponseFormat,"json") 103 | parameters.format = "json"; 104 | elseif isstruct(nvp.ResponseFormat) 105 | parameters.format = llms.internal.jsonSchemaFromPrototype(nvp.ResponseFormat); 106 | elseif startsWith(string(nvp.ResponseFormat), asManyOfPattern(whitespacePattern)+"{") 107 | parameters.format = llms.internal.verbatimJSON(nvp.ResponseFormat); 108 | end 109 | 110 | if ~isempty(nvp.Seed) 111 | options.seed = nvp.Seed; 112 | end 113 | 114 | dict = mapNVPToParameters; 115 | 116 | nvpOptions = keys(dict); 117 | for opt = nvpOptions.' 118 | if isfield(nvp, opt) && ~isempty(nvp.(opt)) && ~isequaln(nvp.(opt),Inf) 119 | options.(dict(opt)) = nvp.(opt); 120 | end 121 | end 122 | 123 | parameters.options = options; 124 | end 125 | 126 | function dict = mapNVPToParameters() 127 | dict = dictionary(); 128 | dict("Temperature") = "temperature"; 129 | dict("TopP") = "top_p"; 130 | dict("MinP") = "min_p"; 131 | dict("TopK") = "top_k"; 132 | dict("TailFreeSamplingZ") = "tfs_z"; 133 | dict("StopSequences") = "stop"; 134 | dict("MaxNumTokens") = "num_predict"; 135 | end 136 | -------------------------------------------------------------------------------- /+llms/+internal/callOpenAIChatAPI.m: -------------------------------------------------------------------------------- 1 | function [text, message, response] = callOpenAIChatAPI(messages, functions, nvp) 2 | % This function is undocumented and will change in a future release 3 | 4 | %callOpenAIChatAPI Calls the openAI chat completions API. 
5 | % 6 | % MESSAGES and FUNCTIONS should be structs matching the json format 7 | % required by the OpenAI Chat Completions API. 8 | % Ref: https://platform.openai.com/docs/guides/gpt/chat-completions-api 9 | % 10 | % More details on the parameters: https://platform.openai.com/docs/api-reference/chat/create 11 | % 12 | % Example 13 | % 14 | % % Create messages struct 15 | % messages = {struct("role", "system",... 16 | % "content", "You are a helpful assistant"); 17 | % struct("role", "user", ... 18 | % "content", "What is the edit distance between hi and hello?")}; 19 | % 20 | % % Create functions struct 21 | % functions = {struct("name", "editDistance", ... 22 | % "description", "Find edit distance between two strings or documents.", ... 23 | % "parameters", struct( ... 24 | % "type", "object", ... 25 | % "properties", struct(... 26 | % "str1", struct(... 27 | % "description", "Source string.", ... 28 | % "type", "string"),... 29 | % "str2", struct(... 30 | % "description", "Target string.", ... 31 | % "type", "string")),... 32 | % "required", ["str1", "str2"]))}; 33 | % 34 | % % Define your API key 35 | % apiKey = "your-api-key-here" 36 | % 37 | % % Send a request 38 | % [text, message] = llms.internal.callOpenAIChatAPI(messages, functions, APIKey=apiKey) 39 | 40 | % Copyright 2023-2024 The MathWorks, Inc. 
41 | 42 | arguments 43 | messages 44 | functions 45 | nvp.ToolChoice 46 | nvp.ModelName 47 | nvp.Temperature 48 | nvp.TopP 49 | nvp.NumCompletions 50 | nvp.StopSequences 51 | nvp.MaxNumTokens 52 | nvp.PresencePenalty 53 | nvp.FrequencyPenalty 54 | nvp.ResponseFormat 55 | nvp.Seed 56 | nvp.APIKey 57 | nvp.TimeOut 58 | nvp.StreamFun 59 | end 60 | 61 | END_POINT = "https://api.openai.com/v1/chat/completions"; 62 | 63 | parameters = buildParametersCall(messages, functions, nvp); 64 | 65 | [response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun); 66 | 67 | % If call errors, "choices" will not be part of response.Body.Data, instead 68 | % we get response.Body.Data.error 69 | if response.StatusCode=="OK" 70 | % Outputs the first generation 71 | if isempty(nvp.StreamFun) 72 | message = response.Body.Data.choices(1).message; 73 | else 74 | pat = '{"' + wildcardPattern + '":'; 75 | if contains(streamedText,pat) 76 | s = jsondecode(streamedText); 77 | if contains(s.function.arguments,pat) 78 | prompt = jsondecode(s.function.arguments); 79 | s.function.arguments = prompt; 80 | end 81 | message = struct("role", "assistant", ... 82 | "content",[], ... 83 | "tool_calls",jsondecode(streamedText)); 84 | else 85 | message = struct("role", "assistant", ... 86 | "content", streamedText); 87 | end 88 | end 89 | if isfield(message, "tool_choice") 90 | text = ""; 91 | else 92 | text = string(message.content); 93 | end 94 | else 95 | text = ""; 96 | message = struct(); 97 | end 98 | end 99 | 100 | function parameters = buildParametersCall(messages, functions, nvp) 101 | % Builds a struct in the format that is expected by the API, combining 102 | % MESSAGES, FUNCTIONS and parameters in NVP. 
103 | 104 | parameters = struct(); 105 | parameters.messages = messages; 106 | 107 | parameters.stream = ~isempty(nvp.StreamFun); 108 | 109 | if ~isempty(functions) 110 | parameters.tools = functions; 111 | end 112 | 113 | if ~isempty(nvp.ToolChoice) 114 | parameters.tool_choice = nvp.ToolChoice; 115 | end 116 | 117 | if strcmp(nvp.ResponseFormat,"json") 118 | parameters.response_format = struct('type','json_object'); 119 | elseif isstruct(nvp.ResponseFormat) 120 | parameters.response_format = struct('type','json_schema',... 121 | 'json_schema', struct('strict', true, 'name', 'computedFromPrototype', ... 122 | 'schema', llms.internal.jsonSchemaFromPrototype(nvp.ResponseFormat))); 123 | elseif startsWith(string(nvp.ResponseFormat), asManyOfPattern(whitespacePattern)+"{") 124 | parameters.response_format = struct('type','json_schema',... 125 | 'json_schema', struct('strict', true, 'name', 'providedInCall', ... 126 | 'schema', llms.internal.verbatimJSON(nvp.ResponseFormat))); 127 | end 128 | 129 | if ~isempty(nvp.Seed) 130 | parameters.seed = nvp.Seed; 131 | end 132 | 133 | parameters.model = nvp.ModelName; 134 | 135 | dict = mapNVPToParameters; 136 | 137 | nvpOptions = keys(dict); 138 | 139 | for opt = nvpOptions.' 
140 | if isfield(nvp, opt) 141 | parameters.(dict(opt)) = nvp.(opt); 142 | end 143 | end 144 | 145 | if isempty(nvp.StopSequences) 146 | parameters = rmfield(parameters,dict("StopSequences")); 147 | end 148 | 149 | if nvp.MaxNumTokens == Inf 150 | parameters = rmfield(parameters,dict("MaxNumTokens")); 151 | end 152 | 153 | end 154 | 155 | function dict = mapNVPToParameters() 156 | dict = dictionary(); 157 | dict("Temperature") = "temperature"; 158 | dict("TopP") = "top_p"; 159 | dict("NumCompletions") = "n"; 160 | dict("StopSequences") = "stop"; 161 | dict("MaxNumTokens") = "max_completion_tokens"; 162 | dict("PresencePenalty") = "presence_penalty"; 163 | dict("FrequencyPenalty ") = "frequency_penalty"; 164 | end -------------------------------------------------------------------------------- /+llms/+internal/getApiKeyFromNvpOrEnv.m: -------------------------------------------------------------------------------- 1 | function key = getApiKeyFromNvpOrEnv(nvp,envVarName) 2 | % This function is undocumented and will change in a future release 3 | 4 | %getApiKeyFromNvpOrEnv Retrieves an API key from a Name-Value Pair struct or environment variable. 5 | % 6 | % This function takes a struct nvp containing name-value pairs and checks if 7 | % it contains a field called "APIKey". If the field is not found, the 8 | % function attempts to retrieve the API key from an environment variable 9 | % whose name is given as the second argument. If both methods fail, the 10 | % function throws an error. 11 | 12 | % Copyright 2023-2024 The MathWorks, Inc. 
13 | 14 | if isfield(nvp, "APIKey") 15 | key = nvp.APIKey; 16 | else 17 | if isenv(envVarName) 18 | key = getenv(envVarName); 19 | else 20 | error("llms:keyMustBeSpecified", llms.utils.errorMessageCatalog.getMessage("llms:keyMustBeSpecified", envVarName)); 21 | end 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /+llms/+internal/gptPenalties.m: -------------------------------------------------------------------------------- 1 | classdef (Abstract) gptPenalties 2 | % This class is undocumented and will change in a future release 3 | 4 | % Copyright 2024 The MathWorks, Inc. 5 | properties 6 | %PRESENCEPENALTY Penalty for using a token in the response that has already been used. 7 | PresencePenalty {llms.utils.mustBeValidPenalty} = 0 8 | 9 | %FREQUENCYPENALTY Penalty for using a token that is frequent in the training data. 10 | FrequencyPenalty {llms.utils.mustBeValidPenalty} = 0 11 | end 12 | end 13 | -------------------------------------------------------------------------------- /+llms/+internal/hasTools.m: -------------------------------------------------------------------------------- 1 | classdef (Abstract) hasTools 2 | % This class is undocumented and will change in a future release 3 | 4 | % Copyright 2023-2024 The MathWorks, Inc. 
5 | 6 | properties (SetAccess=protected) 7 | %FunctionNames Names of the functions that the model can request calls 8 | FunctionNames 9 | end 10 | 11 | properties (Access=protected) 12 | Tools 13 | FunctionsStruct 14 | end 15 | 16 | methods(Hidden) 17 | function mustBeValidFunctionCall(this, functionCall) 18 | if ~isempty(functionCall) 19 | mustBeTextScalar(functionCall); 20 | if isempty(this.FunctionNames) 21 | error("llms:mustSetFunctionsForCall", llms.utils.errorMessageCatalog.getMessage("llms:mustSetFunctionsForCall")); 22 | end 23 | mustBeMember(functionCall, ["none","auto", this.FunctionNames]); 24 | end 25 | end 26 | 27 | function toolChoice = convertToolChoice(this, toolChoice) 28 | % if toolChoice is empty 29 | if isempty(toolChoice) 30 | % if Tools is not empty, the default is 'auto'. 31 | if ~isempty(this.Tools) 32 | toolChoice = "auto"; 33 | end 34 | elseif ~ismember(toolChoice,["auto","none"]) 35 | % if toolChoice is not empty, then it must be "auto", "none" or in the format 36 | % {"type": "function", "function": {"name": "my_function"}} 37 | toolChoice = struct("type","function","function",struct("name",toolChoice)); 38 | end 39 | 40 | end 41 | end 42 | end 43 | -------------------------------------------------------------------------------- /+llms/+internal/jsonSchemaFromPrototype.m: -------------------------------------------------------------------------------- 1 | function schema = jsonSchemaFromPrototype(prototype) 2 | % This function is undocumented and will change in a future release 3 | 4 | %jsonSchemaFromPrototype Create a JSON Schema matching given prototype 5 | 6 | % Copyright 2024 The MathWorks Inc. 7 | 8 | if ~isstruct(prototype) 9 | error("llms:incorrectResponseFormat", ... 
10 | llms.utils.errorMessageCatalog.getMessage("llms:incorrectResponseFormat")); 11 | end 12 | 13 | % OpenAI requires top-level to be "type":"object" 14 | if ~isscalar(prototype) 15 | prototype = struct("result",{prototype}); 16 | end 17 | 18 | schema = recursiveSchemaFromPrototype(prototype); 19 | end 20 | 21 | function schema = recursiveSchemaFromPrototype(prototype) 22 | if ~isscalar(prototype) 23 | schema = struct("type","array","items",recursiveSchemaFromPrototype(prototype(1))); 24 | elseif isstruct(prototype) 25 | schema = schemaFromStruct(prototype); 26 | elseif isstring(prototype) || iscellstr(prototype) 27 | schema = struct("type","string"); 28 | elseif isinteger(prototype) 29 | schema = struct("type","integer"); 30 | elseif isnumeric(prototype) && ~isa(prototype,'dlarray') 31 | schema = struct("type","number"); 32 | elseif islogical(prototype) 33 | schema = struct("type","boolean"); 34 | elseif iscategorical(prototype) 35 | schema = struct("type","string", ... 36 | "enum",{categories(prototype)}); 37 | elseif ismissing(prototype) 38 | schema = struct("type","null"); 39 | else 40 | error("llms:unsupportedDatatypeInPrototype", ... 41 | llms.utils.errorMessageCatalog.getMessage("llms:unsupportedDatatypeInPrototype", class(prototype))); 42 | end 43 | end 44 | 45 | function schema = schemaFromStruct(prototype) 46 | fields = string(fieldnames(prototype)); 47 | 48 | properties = struct(); 49 | for fn=fields(:).' 50 | properties.(fn) = recursiveSchemaFromPrototype(prototype.(fn)); 51 | end 52 | 53 | % to make jsonencode encode an array 54 | if isscalar(fields) 55 | fields = {{fields}}; 56 | end 57 | 58 | schema = struct( ... 59 | "type","object", ... 60 | "properties",properties, ... 61 | "required",fields, ... 
62 | "additionalProperties",false); 63 | end 64 | -------------------------------------------------------------------------------- /+llms/+internal/needsAPIKey.m: -------------------------------------------------------------------------------- 1 | classdef (Abstract) needsAPIKey 2 | % This class is undocumented and will change in a future release 3 | 4 | % Copyright 2023-2024 The MathWorks, Inc. 5 | 6 | properties (Access=protected) 7 | APIKey 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /+llms/+internal/reformatOutput.m: -------------------------------------------------------------------------------- 1 | function result = reformatOutput(result,responseFormat) 2 | % This function is undocumented and will change in a future release 3 | 4 | %reformatOutput - Create the expected struct for structured output 5 | 6 | % Copyright 2024 The MathWorks, Inc. 7 | 8 | if isstruct(responseFormat) 9 | try 10 | result = jsondecode(result); 11 | catch 12 | error("llms:apiReturnedIncompleteJSON",llms.utils.errorMessageCatalog.getMessage("llms:apiReturnedIncompleteJSON",result)) 13 | end 14 | end 15 | if isstruct(responseFormat) && ~isscalar(responseFormat) 16 | result = result.result; 17 | end 18 | if isstruct(responseFormat) 19 | result = llms.internal.useSameFieldTypes(result,responseFormat); 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /+llms/+internal/sendRequest.m: -------------------------------------------------------------------------------- 1 | function [response, streamedText] = sendRequest(parameters, token, endpoint, timeout, streamFun) 2 | % This function is undocumented and will change in a future release 3 | 4 | %sendRequest Sends a request to an ENDPOINT using PARAMETERS and 5 | % api key TOKEN. TIMEOUT is the number of seconds to wait for initial 6 | % server connection. STREAMFUN is an optional callback function. 
7 | 8 | % Copyright 2023-2025 The MathWorks, Inc. 9 | 10 | arguments 11 | parameters 12 | token 13 | endpoint 14 | timeout 15 | streamFun = [] 16 | end 17 | 18 | % Define the headers for the API request 19 | 20 | headers = matlab.net.http.HeaderField('Content-Type', 'application/json'); 21 | if ~isempty(token) 22 | headers = [headers ... 23 | matlab.net.http.HeaderField('Authorization', "Bearer " + token)... 24 | matlab.net.http.HeaderField('api-key',token)]; 25 | end 26 | 27 | % Define the request message 28 | request = matlab.net.http.RequestMessage('post',headers,parameters); 29 | 30 | % set the timeout 31 | httpOpts = matlab.net.http.HTTPOptions; 32 | httpOpts.ConnectTimeout = timeout; 33 | httpOpts.ResponseTimeout = timeout; 34 | 35 | % Send the request and store the response 36 | if isempty(streamFun) 37 | response = send(request, matlab.net.URI(endpoint),httpOpts); 38 | streamedText = ""; 39 | else 40 | % User defined a stream callback function 41 | consumer = llms.stream.responseStreamer(streamFun); 42 | response = send(request, matlab.net.URI(endpoint),httpOpts,consumer); 43 | streamedText = consumer.ResponseText; 44 | end 45 | 46 | % When the server sends jsonl or ndjson back, we do not get the automatic conversion. 
47 | if isnumeric(response.Body.Data) 48 | txt = native2unicode(response.Body.Data.',"UTF-8"); 49 | % convert to JSON array 50 | json = "[" + replace(strtrim(txt),newline,',') + "]"; 51 | try 52 | response.Body.Data = jsondecode(json); 53 | end 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /+llms/+internal/sendRequestWrapper.m: -------------------------------------------------------------------------------- 1 | function [response, streamedText] = sendRequestWrapper(varargin) 2 | % This function is undocumented and will change in a future release 3 | 4 | % A wrapper around sendRequest to have a test seam 5 | [response, streamedText] = llms.internal.sendRequest(varargin{:}); 6 | -------------------------------------------------------------------------------- /+llms/+internal/textGenerator.m: -------------------------------------------------------------------------------- 1 | classdef (Abstract) textGenerator 2 | % This class is undocumented and will change in a future release 3 | 4 | % Copyright 2023-2024 The MathWorks, Inc. 5 | 6 | properties 7 | %Temperature Temperature of generation. 8 | Temperature {llms.utils.mustBeValidTemperature} = 1 9 | 10 | %TopP Top probability mass to consider for generation. 11 | TopP {llms.utils.mustBeValidProbability} = 1 12 | 13 | %StopSequences Sequences to stop the generation of tokens. 14 | StopSequences {llms.utils.mustBeValidStop} = {} 15 | end 16 | 17 | properties (SetAccess=protected) 18 | %TimeOut Connection timeout in seconds (default 10 secs) 19 | TimeOut 20 | 21 | %SystemPrompt System prompt. 
function data = useSameFieldTypes(data,prototype)
% This function is undocumented and will change in a future release

%useSameFieldTypes Change struct field data types to match prototype
%   DATA = useSameFieldTypes(DATA,PROTOTYPE) returns DATA with every field
%   recursively converted to the class of the corresponding field in
%   PROTOTYPE. It is called (via llms.internal.reformatOutput) on the
%   result of jsondecode so that structured output matches the types of
%   the user-supplied prototype (string, categorical, missing, numeric, ...).

% Copyright 2024 The MathWorks Inc.

% For a non-scalar struct array, convert each element separately and
% re-assemble: arrayfun with UniformOutput=false yields a cell array,
% which vertcat turns back into a struct array.
if ~isscalar(data)
    data = arrayfun( ...
        @(d) llms.internal.useSameFieldTypes(d,prototype), data, ...
        UniformOutput=false);
    data = vertcat(data{:});
    return
end

data = alignTypes(data, prototype);
end

function data = alignTypes(data, prototype)
%alignTypes Convert DATA to the class of PROTOTYPE, recursing into structs.
switch class(prototype)
    case "struct"
        % Only the first prototype element defines the target field types.
        prototype = prototype(1);
        if isscalar(data)
            % Recurse into fields only when both structs have exactly the
            % same field names; otherwise DATA is left unchanged.
            if isequal(sort(fieldnames(data)),sort(fieldnames(prototype)))
                for field_c = fieldnames(data).'
                    field = field_c{1};
                    data.(field) = alignTypes(data.(field),prototype.(field));
                end
            end
        else
            % Struct array: convert element-wise, then re-assemble.
            data = arrayfun(@(d) alignTypes(d,prototype), data, UniformOutput=false);
            data = vertcat(data{:});
        end
    case "string"
        data = string(data);
    case "categorical"
        % Restrict values to the categories declared by the prototype.
        data = categorical(string(data),categories(prototype));
    case "missing"
        data = missing;
    otherwise
        % Numeric, logical, etc.: match class (and attributes) of the prototype.
        data = cast(data,"like",prototype);
end
end
function validateMessageSupported(message, model)
%validateMessageSupported - check that message is supported by model
%   validateMessageSupported(MESSAGE, MODEL) errors with identifier
%   "llms:invalidContentTypeForModel" if MESSAGE contains image content
%   ("image_url" parts in a cell-array content) and MODEL is not one of
%   the vision-capable models listed below. Messages whose content is
%   plain text pass for every model.

% Copyright 2024 The MathWorks, Inc.

% Only certain models support image input.
% NOTE(review): this allow-list is a subset of llms.openai.models and may
% lag behind newer vision-capable releases (e.g. later gpt-4o snapshots in
% models.m are absent here) — confirm against the OpenAI model
% documentation before extending.
if iscell(message.content) && any(cellfun(@(x) isfield(x,"image_url"), message.content))
    if ~ismember(model,["gpt-4-turbo","gpt-4-turbo-2024-04-09",...
            "gpt-4o-mini","gpt-4o-mini-2024-07-18",...
            "gpt-4o","gpt-4o-2024-05-13"])
        error("llms:invalidContentTypeForModel", ...
            llms.utils.errorMessageCatalog.getMessage("llms:invalidContentTypeForModel", "Image content", model));
    end
end
end
classdef responseStreamer < matlab.net.http.io.BinaryConsumer
%responseStreamer Responsible for obtaining the streaming results from the
%API
%   HTTP consumer that incrementally decodes newline-delimited JSON
%   ("data: {...}" server-sent-event lines) as bytes arrive, forwards each
%   new piece of generated text to the user-supplied StreamFun callback,
%   and accumulates the complete response in ResponseText.

% Copyright 2023-2025 The MathWorks, Inc.

    properties
        % Accumulated response so far: generated text, or (for tool calls)
        % the JSON-encoded tool-call struct being assembled.
        ResponseText
        % User callback invoked with each newly received text chunk.
        StreamFun
        % Trailing partial JSON line carried over to the next putData call.
        Incomplete = ""
    end

    methods
        function this = responseStreamer(streamFun)
            this.StreamFun = streamFun;
        end
    end

    methods (Access=protected)
        function length = start(this)
            % On a non-OK status, report length 0 so no body bytes are
            % consumed by this streamer; otherwise defer to the base class.
            if this.Response.StatusCode ~= matlab.net.http.StatusCode.OK
                length = 0;
            else
                length = this.start@matlab.net.http.io.BinaryConsumer;
            end
        end
    end

    methods
        function [len,stop] = putData(this, data)
            % Let the base class record the raw bytes, then parse them.
            [len,stop] = this.putData@matlab.net.http.io.BinaryConsumer(data);
            stop = doPutData(this, data, stop);
        end
    end

    methods (Access=?tresponseStreamer)
        function stop = doPutData(this, data, stop)
            % Extract out the response text from the message
            % Decode the raw bytes as UTF-8 and prepend any leftover
            % partial line from the previous chunk.
            str = native2unicode(data','UTF-8');
            str = this.Incomplete + string(str);
            this.Incomplete = "";
            % Split into lines, drop empties, strip the SSE "data: " prefix.
            str = split(str,newline);
            str = str(strlength(str)>0);
            str = erase(str,"data: ");

            for i = 1:length(str)
                if strcmp(str{i},'[DONE]')
                    % Sentinel marking the end of the stream.
                    stop = true;
                    return
                else
                    try
                        json = jsondecode(str{i});
                    catch ME
                        % A decode failure on the LAST line means the chunk
                        % was cut mid-JSON: stash it and wait for more data.
                        % A failure on an earlier line is a hard error.
                        if i == length(str)
                            this.Incomplete = str{i};
                            return;
                        end
                        error("llms:stream:responseStreamer:InvalidInput", ...
                            llms.utils.errorMessageCatalog.getMessage(...
                            "llms:stream:responseStreamer:InvalidInput", str{i}));
                    end
                    if isfield(json,'choices')
                        % OpenAI/Azure-style chunk.
                        if isempty(json.choices)
                            continue;
                        end
                        if isfield(json.choices,'finish_reason') && ...
                            ischar(json.choices.finish_reason) && ismember(json.choices.finish_reason,["stop","tool_calls"])
                            stop = true;
                            return
                        else
                            if isfield(json.choices,"delta") && ...
                                isfield(json.choices.delta,"tool_calls")
                                if isfield(json.choices.delta.tool_calls,"id")
                                    % First tool-call chunk carries id/type/function;
                                    % start a fresh JSON accumulator in ResponseText.
                                    id = json.choices.delta.tool_calls.id;
                                    type = json.choices.delta.tool_calls.type;
                                    fcn = json.choices.delta.tool_calls.function;
                                    s = struct('id',id,'type',type,'function',fcn);
                                    txt = jsonencode(s);
                                else
                                    % Later chunks only append to function.arguments
                                    % of the JSON already stored in ResponseText.
                                    s = jsondecode(this.ResponseText);
                                    args = json.choices.delta.tool_calls.function.arguments;
                                    s.function.arguments = [s.function.arguments args];
                                    txt = jsonencode(s);
                                end
                                % Tool-call chunks surface no user-visible text.
                                this.StreamFun('');
                                this.ResponseText = txt;
                            elseif isfield(json.choices,"delta") && ...
                                isfield(json.choices.delta,"content")
                                % Plain text delta: forward and accumulate.
                                txt = json.choices.delta.content;
                                this.StreamFun(txt);
                                this.ResponseText = [this.ResponseText txt];
                            end
                        end
                    else
                        % Chunk without 'choices': has message/done fields.
                        % NOTE(review): appears to be the Ollama response
                        % shape (cf. llms.internal.callOllamaChatAPI) — confirm.
                        txt = json.message.content;
                        if strlength(txt) > 0
                            this.StreamFun(txt);
                            this.ResponseText = [this.ResponseText txt];
                        end
                        if isfield(json.message,"tool_calls")
                            s = json.message.tool_calls;
                            txt = jsonencode(s);
                            this.StreamFun('');
                            this.ResponseText = [this.ResponseText txt];
                        end
                        if isfield(json,"done")
                            % Server signals completion via "done": true.
                            stop = json.done;
                        end
                    end
                end
            end
        end
    end
end
14 | % The value in SLOT should be ordered, where the n-th element 15 | % will replace the value "{n}". 16 | 17 | arguments 18 | messageId {mustBeNonzeroLengthText} 19 | end 20 | arguments(Repeating) 21 | slot {mustBeNonzeroLengthText} 22 | end 23 | 24 | msg = llms.utils.errorMessageCatalog.Catalog(messageId); 25 | if ~isempty(slot) 26 | for i=1:numel(slot) 27 | msg = replace(msg,"{"+i+"}", slot{i}); 28 | end 29 | end 30 | end 31 | 32 | function s = createCatalog() 33 | %createCatalog will run the initialization code and return the catalog 34 | % This is only meant to get more correct test coverage reports: 35 | % The test coverage reports do not include the properties initialization 36 | % for Catalog from above, so we have a test seam here to re-run it 37 | % within the framework, where it is reported. 38 | s = buildErrorMessageCatalog; 39 | end 40 | end 41 | end 42 | 43 | function catalog = buildErrorMessageCatalog 44 | catalog = dictionary("string", "string"); 45 | catalog("llms:mustBeUnique") = "Values must be unique."; 46 | catalog("llms:mustBeVarName") = "Parameter name must begin with a letter and contain not more than 'namelengthmax' characters."; 47 | catalog("llms:parameterMustBeUnique") = "A parameter name equivalent to '{1}' already exists in Parameters. 
Redefining a parameter is not allowed."; 48 | catalog("llms:mustBeAssistantCall") = "Input struct must contain field 'role' with value 'assistant', and field 'content'."; 49 | catalog("llms:mustBeAssistantWithContent") = "Input struct must contain field 'content' containing text with one or more characters."; 50 | catalog("llms:mustBeAssistantWithIdAndFunction") = "Field 'tool_call' must be a struct with fields 'id' and 'function'."; 51 | catalog("llms:mustBeAssistantWithNameAndArguments") = "Field 'function' must be a struct with fields 'name' and 'arguments'."; 52 | catalog("llms:assistantMustHaveTextName") = "Field 'name' must be text with one or more characters."; 53 | catalog("llms:assistantMustHaveTextOrStructArguments") = "Field 'arguments' must be text with one or more characters, or a scalar struct."; 54 | catalog("llms:mustBeValidIndex") = "Index exceeds the number of array elements. Index must be less than or equal to {1}."; 55 | catalog("llms:removeFromEmptyHistory") = "Unable to remove message from empty message history."; 56 | catalog("llms:stopSequencesMustHaveMax4Elements") = "Number of stop sequences must be less than or equal to 4."; 57 | catalog("llms:endpointMustBeSpecified") = "Unable to find endpoint. Either set environment variable AZURE_OPENAI_ENDPOINT or specify name-value argument ""Endpoint""."; 58 | catalog("llms:deploymentMustBeSpecified") = "Unable to find deployment name. Either set environment variable AZURE_OPENAI_DEPLOYMENT or specify name-value argument ""DeploymentID""."; 59 | catalog("llms:keyMustBeSpecified") = "Unable to find API key. 
Either set environment variable {1} or specify name-value argument ""APIKey""."; 60 | catalog("llms:mustHaveMessages") = "Message history must not be empty."; 61 | catalog("llms:mustSetFunctionsForCall") = "When no functions are defined, ToolChoice must not be specified."; 62 | catalog("llms:mustBeMessagesOrTxt") = "Message must be nonempty string, character array, cell array of character vectors, or messageHistory object."; 63 | catalog("llms:invalidOptionAndValueForModel") = "'{1}' with value '{2}' is not supported for model ""{3}""."; 64 | catalog("llms:noStructuredOutputForModel") = "Structured output is not supported for model ""{1}""."; 65 | catalog("llms:noStructuredOutputForAzureDeployment") = "Structured output is not supported for deployment ""{1}""."; 66 | catalog("llms:structuredOutputRequiresAPI") = "Structured output is not supported for API version ""{1}"". Use APIVersion=""2024-08-01-preview"" or newer."; 67 | catalog("llms:invalidOptionForModel") = "Invalid argument name {1} for model ""{2}""."; 68 | catalog("llms:invalidContentTypeForModel") = "{1} is not supported for model ""{2}""."; 69 | catalog("llms:functionNotAvailableForModel") = "Image editing is not supported for model ""{1}""."; 70 | catalog("llms:promptLimitCharacter") = "Prompt must contain at most {1} characters for model ""{2}""."; 71 | catalog("llms:pngExpected") = "Image must be a PNG file (*.png)."; 72 | catalog("llms:warningJsonInstruction") = "When using JSON mode, you must also prompt the model to produce JSON yourself via a system or user message."; 73 | catalog("llms:apiReturnedError") = "Server returned error indicating: ""{1}"""; 74 | catalog("llms:apiReturnedIncompleteJSON") = "Generated output is not valid JSON: ""{1}"""; 75 | catalog("llms:dimensionsMustBeSmallerThan") = "Dimensions must be less than or equal to {1}."; 76 | catalog("llms:stream:responseStreamer:InvalidInput") = "Input does not have the expected json format, got ""{1}""."; 77 | 
catalog("llms:unsupportedDatatypeInPrototype") = "Invalid data type ''{1}'' in prototype. Prototype must be a struct, composed of numerical, string, logical, categorical, or struct."; 78 | catalog("llms:incorrectResponseFormat") = "Invalid response format. Response format must be ""text"", ""json"", a struct, or a string with a JSON Schema definition."; 79 | catalog("llms:OllamaStructuredOutputNeeds05") = "Structured output is not supported for Ollama version {1}. Use version 0.5.0 or newer."; 80 | catalog("llms:noOllamaFound") = "Unable to connect to Ollama. Check that {1} is a valid endpoint and ensure that Ollama is installed and running."; 81 | end 82 | -------------------------------------------------------------------------------- /+llms/+utils/isUnique.m: -------------------------------------------------------------------------------- 1 | function tf = isUnique(values) 2 | % This function is undocumented and will change in a future release 3 | 4 | % Simple function to check if value is unique 5 | 6 | % Copyright 2023 The MathWorks, Inc. 7 | tf = numel(values)==numel(unique(values)); 8 | end 9 | -------------------------------------------------------------------------------- /+llms/+utils/mustBeNonzeroLengthTextScalar.m: -------------------------------------------------------------------------------- 1 | function mustBeNonzeroLengthTextScalar(content) 2 | % This function is undocumented and will change in a future release 3 | 4 | % Simple function to check if value is empty or text scalar 5 | 6 | % Copyright 2024 The MathWorks, Inc. 
function mustBeResponseFormat(format)
% This function is undocumented and will change in a future release

%mustBeResponseFormat Validate a response format specification
%   Accepts "text", "json", text beginning (after optional leading
%   whitespace) with "{" (interpreted as a JSON Schema), or a struct
%   prototype. Anything else errors with "llms:incorrectResponseFormat".

% Copyright 2024 The MathWorks Inc.
isTextual = isstring(format) || ischar(format) || iscellstr(format);

if ~isTextual
    % Non-text formats must be struct prototypes.
    if ~isstruct(format)
        error("llms:incorrectResponseFormat", ...
            llms.utils.errorMessageCatalog.getMessage("llms:incorrectResponseFormat"));
    end
    return
end

mustBeTextScalar(format);
isKeyword = ismember(format,["text","json"]);
looksLikeSchema = startsWith(format,asManyOfPattern(whitespacePattern)+"{");
if ~(isKeyword || looksLikeSchema)
    error("llms:incorrectResponseFormat", ...
        llms.utils.errorMessageCatalog.getMessage("llms:incorrectResponseFormat"));
end
end
function mustBeValidStop(value)
% This function is undocumented and will change in a future release

%mustBeValidStop Validate stop-sequence input
%   An empty value is always accepted. Otherwise the value must be a
%   vector of nonzero-length text with at most four elements.

% Copyright 2024 The MathWorks, Inc.
if isempty(value)
    return
end

mustBeVector(value);
mustBeNonzeroLengthText(value);

% This restriction is set by the OpenAI API
stopSequences = string(value);
if numel(stopSequences) > 4
    error("llms:stopSequencesMustHaveMax4Elements", llms.utils.errorMessageCatalog.getMessage("llms:stopSequencesMustHaveMax4Elements"));
end
end
5 | validateattributes(value, {'numeric'}, {'real', 'scalar', 'nonnegative', 'nonsparse', '<=', 2}) 6 | end 7 | -------------------------------------------------------------------------------- /+llms/+utils/requestsStructuredOutput.m: -------------------------------------------------------------------------------- 1 | function tf = requestsStructuredOutput(format) 2 | % This function is undocumented and will change in a future release 3 | 4 | % Simple function to check if requested format triggers structured output 5 | 6 | % Copyright 2024 The MathWorks, Inc. 7 | tf = isstruct(format) || startsWith(format,asManyOfPattern(whitespacePattern)+"{"); 8 | end 9 | 10 | -------------------------------------------------------------------------------- /+llms/jsonSchemaFromPrototype.m: -------------------------------------------------------------------------------- 1 | function str = jsonSchemaFromPrototype(prototype) 2 | %jsonSchemaFromPrototype - create JSON Schema from prototype 3 | % STR = llms.jsonSchemaFromPrototype(PROTOTYPE) creates a JSON Schema 4 | % that can be used with openAIChat ResponseFormat. 5 | % 6 | % Example: 7 | % >> prototype = struct("name","Alena Zlatkov","age",32); 8 | % >> schema = llms.jsonSchemaFromPrototype(prototype); 9 | % >> generate(openAIChat, "Generate a random person", ResponseFormat=schema) 10 | % 11 | % ans = "{"name":"Emily Carter","age":29}" 12 | 13 | % Copyright 2024 The MathWorks, Inc. 
14 | 15 | str = string(jsonencode(llms.internal.jsonSchemaFromPrototype(prototype),PrettyPrint=true)); 16 | end 17 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.fig binary 2 | *.mat binary 3 | *.mdl binary diff merge=mlAutoMerge 4 | *.mdlp binary 5 | *.mexa64 binary 6 | *.mexw64 binary 7 | *.mexmaci64 binary 8 | *.mlapp binary 9 | *.mldatx binary 10 | *.mlproj binary 11 | *.mlx binary 12 | *.p binary 13 | *.sfx binary 14 | *.sldd binary 15 | *.slreqx binary merge=mlAutoMerge 16 | *.slmx binary merge=mlAutoMerge 17 | *.sltx binary 18 | *.slxc binary 19 | *.slx binary merge=mlAutoMerge 20 | *.slxp binary 21 | 22 | ## Other common binary file types 23 | *.docx binary 24 | *.exe binary 25 | *.jpg binary 26 | *.pdf binary 27 | *.png binary 28 | *.xlsx binary 29 | -------------------------------------------------------------------------------- /.githooks/pre-commit: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | cd $(git rev-parse --show-toplevel) 6 | pwd 7 | 8 | # For all commits of mlx files, create corresponding Markdown (md) files. 9 | # If the mlx files are in .../mlx-scripts/*.mlx, the corresponding 10 | # md files will go into .../*.md. 11 | # 12 | # This script assumes that the mlx files as currently in the file system 13 | # are what is being committed, instead of doing a lot of extra work to 14 | # get them from the stage area. 15 | # 16 | # Note that this script will not remove media files. If an mlx has 17 | # fewer plots at some point in the future, there will be file system 18 | # cruft. Which doesn't hurt the md display in GitHub or elswehere. 
19 | changedMlxFiles=`git diff --cached --name-only --diff-filter=d '*.mlx'` 20 | 21 | if [ -n "$changedMlxFiles" ]; then 22 | # Keep the line break here, we replace end-of-line with "' '" to get the quotes right 23 | matlab -batch "for file = {'${changedMlxFiles// 24 | /' '}'}, export(file{1},replace(erase(file{1},'mlx-scripts'),'.mlx','.md')); end" 25 | tmp=${changedMlxFiles//mlx-scripts\//} 26 | mdFiles=${tmp//.mlx/.md} 27 | for file in $mdFiles; do 28 | if [ -d ${file%.md}_media ]; then 29 | git add ${file%.md}_media/ 30 | fi 31 | perl -pi -e "\$cnt++ if /^#/; " \ 32 | -e "\$_ .= \"\nTo run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/$(basename $file .md).mlx](mlx-scripts/$(basename $file .md).mlx) \n\" if /^#/ && \$cnt==1;" \ 33 | $file 34 | done 35 | git add $mdFiles 36 | fi 37 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Code owners, to get auto-filled reviewer lists 2 | 3 | # To start with, we just assume everyone in the core team is included on all reviews 4 | * @adulai @ccreutzi @debymf @MiriamScharnke @vpapanasta @emanuzzi 5 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Run MATLAB Tests on GitHub-Hosted Runner 2 | on: [push] 3 | jobs: 4 | test: 5 | name: Run MATLAB Tests and Generate Artifacts 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: Check out repository 9 | uses: actions/checkout@v4 10 | - name: Install Ollama 11 | run: | 12 | curl -fsSL https://ollama.com/install.sh | sudo -E sh 13 | - name: Start serving 14 | run: | 15 | # Run the background, there is no way to daemonise at the moment 16 | ollama serve & 17 | # Run a second server to test different endpoint 18 | OLLAMA_HOST=127.0.0.1:11435 
OLLAMA_MODELS=/tmp/ollama/models ollama serve & 19 | 20 | # A short pause is required before the HTTP port is opened 21 | sleep 5 22 | 23 | # This endpoint blocks until ready 24 | time curl -i http://localhost:11434 25 | time curl -i http://localhost:11435 26 | 27 | # For debugging, record Ollama version 28 | ollama --version 29 | 30 | - name: Pull models 31 | run: | 32 | ollama pull mistral-nemo 33 | ollama pull moondream 34 | OLLAMA_HOST=127.0.0.1:11435 ollama pull qwen2:0.5b 35 | - name: Set up MATLAB 36 | uses: matlab-actions/setup-matlab@v2 37 | with: 38 | products: Text_Analytics_Toolbox 39 | cache: true 40 | - name: Run tests and generate artifacts 41 | env: 42 | OPENAI_API_KEY: ${{ secrets.OPENAI_KEY }} 43 | AZURE_OPENAI_DEPLOYMENT: ${{ secrets.AZURE_DEPLOYMENT }} 44 | AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_ENDPOINT }} 45 | AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_KEY }} 46 | SECOND_OLLAMA_ENDPOINT: 127.0.0.1:11435 47 | uses: matlab-actions/run-tests@v2 48 | with: 49 | source-folder: . -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.env 2 | *.asv 3 | *.mat 4 | !tests/recordings/*.mat 5 | startup.m 6 | papers_to_read.csv 7 | data/* 8 | examples/data/* 9 | examples/mlx-scripts/data/* 10 | ._* 11 | .nfs* 12 | .DS_Store 13 | -------------------------------------------------------------------------------- /DEVELOPMENT.md: -------------------------------------------------------------------------------- 1 | # Notes for Developers 2 | 3 | Nothing in this file should be required knowledge to use the repository. These are notes for people actually making changes that are going to be submitted and incorporated into the main branch. 4 | 5 | ## Git Hooks 6 | 7 | After checkout, link or (on Windows) copy the files from `.githooks` into the local `.git/hooks` folder: 8 | 9 | ``` 10 | (cd .git/hooks/; ln -s ../../.githooks/pre-commit .) 
11 | ``` 12 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # Large Language Models (LLMs) with MATLAB 3 | 4 | [![Open in MATLAB Online](https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg)](https://matlab.mathworks.com/open/github/v1?repo=matlab-deep-learning/llms-with-matlab) [![View Large Language Models (LLMs) with MATLAB on File Exchange](https://www.mathworks.com/matlabcentral/images/matlab-file-exchange.svg)](https://www.mathworks.com/matlabcentral/fileexchange/163796-large-language-models-llms-with-matlab) 5 | 6 | Large Language Models (LLMs) with MATLAB lets you connect to large language model APIs using MATLAB®. 7 | 8 | 9 | You can connect to: 10 | 11 | - [OpenAI® Chat Completions API](https://platform.openai.com/docs/guides/text-generation/chat-completions-api) — For example, connect to ChatGPT™. 12 | - [OpenAI Images API](https://platform.openai.com/docs/guides/images) — For example, connect to DALL·E™. 13 | - [Azure® OpenAI Service](https://learn.microsoft.com/en-us/azure/ai-services/openai/) — Connect to OpenAI models from Azure. 14 | - [Ollama™](https://ollama.com/) — Connect to models locally or nonlocally. 15 | 16 | Using this add-on, you can: 17 | 18 | - Generate responses to natural language prompts. 19 | - Manage chat history. 20 | - Generate JSON\-formatted and structured output. 21 | - Use tool calling. 22 | - Generate, edit, and describe images. 23 | 24 | For more information about the features in this add-on, see the documentation in the [`doc`](/doc) directory. 25 | 26 | # Installation 27 | 28 | Using this add-on requires MATLAB R2024a or newer. 
29 | 30 | ## Use MATLAB Online 31 | 32 | You can use the add-on in MATLAB Online™ by clicking this link: [![Open in MATLAB Online](https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg)](https://matlab.mathworks.com/open/github/v1?repo=matlab-deep-learning/llms-with-matlab) 33 | 34 | In MATLAB Online, you can connect to OpenAI and Azure. To connect to Ollama, use an installed version of MATLAB and install the add\-on using the Add\-On Explorer or by cloning the GitHub™ repository. 35 | 36 | ## Install using Add\-On Explorer 37 | 38 | The recommended way of using the add-on on an installed version of MATLAB is to use the Add\-On Explorer. 39 | 40 | 1. In MATLAB, go to the **Home** tab, and in the **Environment** section, click the **Add\-Ons** icon. 41 | 2. In the Add\-On Explorer, search for "Large Language Models (LLMs) with MATLAB". 42 | 3. Select **Install**. 43 | ## Install by Cloning GitHub Repository 44 | 45 | Alternatively, to use the add-on on an installed version of MATLAB, you can clone the GitHub repository. In the MATLAB Command Window, run this command: 46 | 47 | ``` 48 | >> !git clone https://github.com/matlab-deep-learning/llms-with-matlab.git 49 | ``` 50 | 51 | To run code from the add-on outside of the installation directory, if you install the add-on by cloning the GitHub repository, then you must add the path to the installation directory. 
52 | 53 | ``` 54 | >> addpath("path/to/llms-with-matlab") 55 | ``` 56 | # Get Started with External APIs 57 | 58 | For more information about how to connect to the different APIs from MATLAB, including installation requirements, see: 59 | - [OpenAI](/doc/OpenAI.md) 60 | - [Azure OpenAI Service](/doc/Azure.md) 61 | - [Ollama](/doc/Ollama.md) 62 | 63 | # Examples 64 | 65 | - [Process Generated Text in Real Time by Using ChatGPT in Streaming Mode](/examples/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.md) 66 | - [Process Generated Text in Real Time by Using Ollama in Streaming Mode](/examples/ProcessGeneratedTextinRealTimebyUsingOllamaInStreamingMode.md) 67 | - [Summarize Large Documents Using ChatGPT and MATLAB](/examples/SummarizeLargeDocumentsUsingChatGPTandMATLAB.md) (requires Text Analytics Toolbox™) 68 | - [Create Simple ChatBot](/examples/CreateSimpleChatBot.md) (requires Text Analytics Toolbox) 69 | - [Create Simple Ollama ChatBot](/examples/CreateSimpleOllamaChatBot.md) (requires Text Analytics Toolbox) 70 | - [Analyze Scientific Papers Using ChatGPT Function Calls](/examples/AnalyzeScientificPapersUsingFunctionCalls.md) 71 | - [Analyze Sentiment in Text Using ChatGPT and Structured Output](/examples/AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.md) 72 | - [Analyze Text Data Using Parallel Function Calls with ChatGPT](/examples/AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.md) 73 | - [Analyze Text Data Using Parallel Function Calls with Ollama](/examples/AnalyzeTextDataUsingParallelFunctionCallwithOllama.md) 74 | - [Retrieval-Augmented Generation Using ChatGPT and MATLAB](/examples/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.md) (requires Text Analytics Toolbox) 75 | - [Retrieval-Augmented Generation Using Ollama and MATLAB](/examples/RetrievalAugmentedGenerationUsingOllamaAndMATLAB.md) (requires Text Analytics Toolbox) 76 | - [Describe Images Using ChatGPT](/examples/DescribeImagesUsingChatGPT.md) 77 | - [Using DALL·E To 
Edit Images](/examples/UsingDALLEToEditImages.md) 78 | - [Using DALL·E To Generate Images](/examples/UsingDALLEToGenerateImages.md) 79 | 80 | # Functions 81 | | **Function** | **Description** | 82 | | :-- | :-- | 83 | | [openAIChat](/doc/functions/openAIChat.md) | Connect to OpenAI Chat Completion API from MATLAB | 84 | | [azureChat](/doc/functions/azureChat.md) | Connect to Azure OpenAI Services Chat Completion API from MATLAB | 85 | | [ollamaChat](/doc/functions/ollamaChat.md) | Connect to Ollama Server from MATLAB | 86 | | [generate](/doc/functions/generate.md) | Generate output from large language models | 87 | | [openAIFunction](/doc/functions/openAIFunction.md) | Use Function Calls from MATLAB | 88 | | [addParameter](/doc/functions/addParameter.md) | Add input argument to `openAIFunction` object | 89 | | [openAIImages](/doc/functions/openAIImages.md) | Connect to OpenAI Image Generation API from MATLAB | 90 | | [openAIImages.generate](/doc/functions/openAIImages.generate.md) | Generate image using OpenAI image generation API | 91 | | [edit](/doc/functions/edit.md) | Edit images using DALL·E 2 | 92 | | [createVariation](/doc/functions/createVariation.md) | Generate image variations using DALL·E 2 | 93 | | [messageHistory](/doc/functions/messageHistory.md) | Manage and store messages in a conversation | 94 | | [addSystemMessage](/doc/functions/addSystemMessage.md) | Add system message to message history | 95 | | [addUserMessage](/doc/functions/addUserMessage.md) | Add user message to message history | 96 | | [addUserMessageWithImages](/doc/functions/addUserMessageWithImages.md) | Add user message with images to message history | 97 | | [addToolMessage](/doc/functions/addToolMessage.md) | Add tool message to message history | 98 | | [addResponseMessage](/doc/functions/addResponseMessage.md) | Add response message to message history | 99 | | [removeMessage](/doc/functions/removeMessage.md) | Remove message from message history | 100 | 101 | ## License 102 | 103 | 
The license is available in the [license.txt](license.txt) file in this GitHub repository. 104 | 105 | ## Community Support 106 | [MATLAB Central](https://www.mathworks.com/matlabcentral) 107 | 108 | Copyright 2023-2025 The MathWorks, Inc. 109 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Reporting Security Vulnerabilities 2 | 3 | If you believe you have discovered a security vulnerability, please report it to 4 | [security@mathworks.com](mailto:security@mathworks.com). Please see 5 | [MathWorks Vulnerability Disclosure Policy for Security Researchers](https://www.mathworks.com/company/aboutus/policies_statements/vulnerability-disclosure-policy.html) 6 | for additional information. -------------------------------------------------------------------------------- /doc/functions/addParameter.md: -------------------------------------------------------------------------------- 1 | 2 | # addParameter 3 | 4 | Add input argument to `openAIFunction` object 5 | 6 | # Syntax 7 | 8 | `fUpdated = addParameter(f,parameterName)` 9 | 10 | 11 | `___ = addParameter(___,Name=Value)` 12 | 13 | # Description 14 | 15 | `fUpdated = addParameter(f,parameterName)` adds an input argument `parameterName` to the `openAIFunction` object `f`. 16 | 17 | 18 | `___ = addParameter(___,Name=Value)` specifies additional options using one or more name\-value arguments. 19 | 20 | # Examples 21 | ## Compute Sine Using OpenAI Function Call 22 | 23 | First, specify the OpenAI® API key as an environment variable and save it to a file called `".env"`. Next, load the environment file using the `loadenv` function. 24 | 25 | ```matlab 26 | loadenv(".env") 27 | ``` 28 | 29 | Create an `openAIFunction` object that represents the [`sind`](https://www.mathworks.com/help/matlab/ref/sind.html) function. 
The `sind` function has a single input argument, `x`, representing the input angle in degrees. 30 | 31 | ```matlab 32 | f = openAIFunction("sind","Sine of argument in degrees"); 33 | f = addParameter(f,"x",type="number",description="Angle in degrees"); 34 | ``` 35 | 36 | Connect to the OpenAI Chat Completion API. Pass the `openAIFunction` object `f` as an input argument. 37 | 38 | ```matlab 39 | model = openAIChat("You are a helpful assistant.",Tools=f); 40 | ``` 41 | 42 | Initialize the message history. Add a user message to the message history. 43 | 44 | ```matlab 45 | messages = messageHistory; 46 | messages = addUserMessage(messages,"What is the sine of thirty?"); 47 | ``` 48 | 49 | Generate a response based on the message history. 50 | 51 | ```matlab 52 | [~,completeOutput] = generate(model,messages) 53 | ``` 54 | 55 | ```matlabTextOutput 56 | completeOutput = struct with fields: 57 | role: 'assistant' 58 | content: [] 59 | tool_calls: [1x1 struct] 60 | refusal: [] 61 | 62 | ``` 63 | 64 | The model has not generated any text. Instead, it has detected a function call, `completeOutput.tool_calls`. 65 | 66 | 67 | Add the response to the message history. 68 | 69 | ```matlab 70 | messages = addResponseMessage(messages,completeOutput); 71 | ``` 72 | 73 | Extract the tool call ID and the name of the called function. 74 | 75 | ```matlab 76 | toolCallID = string(completeOutput.tool_calls.id) 77 | ``` 78 | 79 | ```matlabTextOutput 80 | toolCallID = "call_Scx4xE9whYiL2FbQWYslDgDr" 81 | ``` 82 | 83 | ```matlab 84 | functionCalled = string(completeOutput.tool_calls.function.name) 85 | ``` 86 | 87 | ```matlabTextOutput 88 | functionCalled = "sind" 89 | ``` 90 | 91 | Make sure that the model is calling the correct function. Even with only a single function, large language models can hallucinate function calls to fictitious functions. 
92 | 93 | 94 | Extract the input argument values from the complete output using the [`jsondecode`](https://www.mathworks.com/help/matlab/ref/jsondecode.html) function. Compute the sine of the generated argument value and add the result to the message history using the `addToolMessage` function. 95 | 96 | ```matlab 97 | if functionCalled == "sind" 98 | args = jsondecode(completeOutput.tool_calls.function.arguments); 99 | result = sind(args.x) 100 | messages = addToolMessage(messages,toolCallID,functionCalled,"x="+result); 101 | end 102 | ``` 103 | 104 | ```matlabTextOutput 105 | result = 0.5000 106 | ``` 107 | 108 | Finally, generate a natural language response. 109 | 110 | ```matlab 111 | generatedText = generate(model,messages) 112 | ``` 113 | 114 | ```matlabTextOutput 115 | generatedText = "The sine of 30 degrees is 0.5." 116 | ``` 117 | # Input Arguments 118 | ### f — OpenAI function 119 | 120 | [`openAIFunction`](openAIFunction.md) object 121 | 122 | 123 | OpenAI function, specified as an `openAIFunction` object. 124 | 125 | ### `parameterName` — Name of new input argument 126 | 127 | string scalar | character array 128 | 129 | 130 | Specify the name of the new input argument. The name must be a valid MATLAB variable name. 131 | 132 | 133 | For more information on variable naming rules in MATLAB, see [https://www.mathworks.com/help/matlab/matlab\_prog/variable\-names.html](https://www.mathworks.com/help/matlab/matlab_prog/variable-names.html). 134 | 135 | ## Name\-Value Arguments 136 | ### `RequiredParameter` — Flag to require argument 137 | 138 | `true` (default) | `false` 139 | 140 | 141 | Specify whether the argument is required (`true`) or optional (`false`). 142 | 143 | ### `description` — Argument description 144 | 145 | string scalar | character vector 146 | 147 | 148 | Natural language description of the input argument, specified as a string or character array. 
149 | 150 | ### `type` — Argument type 151 | 152 | string scalar | string vector | character vector 153 | 154 | 155 | Data type or types of the input argument, specified as JSON data type. The possible argument types and their corresponding MATLAB data types are: 156 | 157 | - `"string"` — character vector 158 | - `"number"` — scalar double 159 | - `"integer"` — scalar integer 160 | - `"object"` — scalar structure 161 | - `"boolean"` — scalar logical 162 | - `"null"` — `NaN` or empty double 163 | 164 | For more information on how to decode JSON\-formatted data in MATLAB, see [jsondecode](https://www.mathworks.com/help/matlab/ref/jsondecode.html). 165 | 166 | ### `enum` — List of possible argument values 167 | 168 | string vector 169 | 170 | 171 | List of all possible values of an input argument. 172 | 173 | 174 | **Example**: `["on" "off" "auto"]` 175 | 176 | # Output Argument 177 | ### `fUpdated` — Updated OpenAI function 178 | 179 | `openAIFunction` object 180 | 181 | 182 | Updated OpenAI function, specified as an `openAIFunction` object. 
183 | 184 | # See Also 185 | 186 | [`openAIFunction`](openAIFunction.md) | [`openAIChat`](openAIChat.md) | [`generate`](generate.md) | [`addToolMessage`](addToolMessage.md) 187 | 188 | - [Analyze Scientific Papers Using ChatGPT Function Calls](../../examples/AnalyzeScientificPapersUsingFunctionCalls.md) 189 | - [Analyze Text Data Using Parallel Function Calls with ChatGPT](../../examples/AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.md) 190 | 191 | *Copyright 2024 The MathWorks, Inc.* 192 | 193 | -------------------------------------------------------------------------------- /doc/functions/addResponseMessage.md: -------------------------------------------------------------------------------- 1 | 2 | # addResponseMessage 3 | 4 | Add response message to message history 5 | 6 | 7 | `updatedMessages = addResponseMessage(messages,completeOutput)` 8 | 9 | # Description 10 | 11 | `updatedMessages = addResponseMessage(messages,completeOutput)` adds the generated output of a large language model to the `messageHistory` object `messages`. 12 | 13 | # Examples 14 | ## Add Response Message to Message History 15 | 16 | First, specify the OpenAI® API key as an environment variable and save it to a file called `".env"`. Next, load the environment file using the `loadenv` function. 17 | 18 | ```matlab 19 | loadenv(".env") 20 | ``` 21 | 22 | Connect to the OpenAI Chat Completion API. 23 | 24 | ```matlab 25 | model = openAIChat("You are a helpful assistant."); 26 | ``` 27 | 28 | Initialize the message history. 29 | 30 | ```matlab 31 | messages = messageHistory; 32 | ``` 33 | 34 | Add a user message to the message history. 35 | 36 | ```matlab 37 | messages = addUserMessage(messages,"Why is a raven like a writing desk?"); 38 | ``` 39 | 40 | Generate a response. 41 | 42 | ```matlab 43 | [generatedText,completeOutput,httpResponse] = generate(model,messages); 44 | ``` 45 | 46 | Add the response to the message history. 
46 | 47 | ```matlab 48 | messages = addResponseMessage(messages,completeOutput); 49 | messages.Messages{end} 50 | ``` 51 | 52 | ```matlabTextOutput 53 | ans = struct with fields: 54 | role: "assistant" 55 | content: "The question "Why is a raven like a writing desk?" is famously posed by the Mad Hatter in Lewis Carroll's "Alice's Adventures in Wonderland." Initially, it is presented as a riddle without a clear answer, contributing to the absurdity and nonsensical nature of the story. However, over time, various interpretations and answers have been suggested, such as:↵↵1. **Both can produce notes**: Ravens can "caw" or make sounds like a note, and writing desks are used for writing notes or letters.↵2. **Both are associated with writing**: Ravense have historically been linked to writers (like Edgar Allan Poe's famous poem "The Raven"), and desks are where writing is done.↵3. **The riddle is inherently nonsensical**: The whole point may be that some riddles don't have answers, fitting into the whimsical and illogical world of Wonderland.↵↵Carroll himself later suggested that the riddle was meant to be without an answer, thus adding to its charm and mystique in the context of his work." 56 | 57 | ``` 58 | ## Compute Sine Using OpenAI Function Call 59 | 60 | First, specify the OpenAI® API key as an environment variable and save it to a file called `".env"`. Next, load the environment file using the `loadenv` function. 61 | 62 | ```matlab 63 | loadenv(".env") 64 | ``` 65 | 66 | Create an `openAIFunction` object that represents the [`sind`](https://www.mathworks.com/help/matlab/ref/sind.html) function. The `sind` function has a single input argument, `x`, representing the input angle in degrees. 67 | 68 | ```matlab 69 | f = openAIFunction("sind","Sine of argument in degrees"); 70 | f = addParameter(f,"x",type="number",description="Angle in degrees"); 71 | ``` 72 | 73 | Connect to the OpenAI Chat Completion API. Pass the `openAIFunction` object `f` as an input argument. 
74 | 75 | ```matlab 76 | model = openAIChat("You are a helpful assistant.",Tools=f); 77 | ``` 78 | 79 | Initialize the message history. Add a user message to the message history. 80 | 81 | ```matlab 82 | messages = messageHistory; 83 | messages = addUserMessage(messages,"What is the sine of thirty?"); 84 | ``` 85 | 86 | Generate a response based on the message history. 87 | 88 | ```matlab 89 | [~,completeOutput] = generate(model,messages) 90 | ``` 91 | 92 | ```matlabTextOutput 93 | completeOutput = struct with fields: 94 | role: 'assistant' 95 | content: [] 96 | tool_calls: [1x1 struct] 97 | refusal: [] 98 | 99 | ``` 100 | 101 | The model has not generated any text. Instead, it has detected a function call, `completeOutput.tool_calls`. 102 | 103 | 104 | Add the response to the message history. 105 | 106 | ```matlab 107 | messages = addResponseMessage(messages,completeOutput); 108 | ``` 109 | 110 | Extract the tool call ID and the name of the called function. 111 | 112 | ```matlab 113 | toolCallID = string(completeOutput.tool_calls.id) 114 | ``` 115 | 116 | ```matlabTextOutput 117 | toolCallID = "call_VLRxaOUTDEyzCY4c8rDnq0jM" 118 | ``` 119 | 120 | ```matlab 121 | functionCalled = string(completeOutput.tool_calls.function.name) 122 | ``` 123 | 124 | ```matlabTextOutput 125 | functionCalled = "sind" 126 | ``` 127 | 128 | Make sure that the model is calling the correct function. Even with only a single function, large language models can hallucinate function calls to fictitious functions. 129 | 130 | 131 | Extract the input argument values from the complete output using the [`jsondecode`](https://www.mathworks.com/help/matlab/ref/jsondecode.html) function. Compute the sine of the generated argument value and add the result to the message history using the `addToolMessage` function. 
132 | 133 | ```matlab 134 | if functionCalled == "sind" 135 | args = jsondecode(completeOutput.tool_calls.function.arguments); 136 | result = sind(args.x) 137 | messages = addToolMessage(messages,toolCallID,functionCalled,"x="+result); 138 | end 139 | ``` 140 | 141 | ```matlabTextOutput 142 | result = 0.5000 143 | ``` 144 | 145 | Finally, generate a natural language response. 146 | 147 | ```matlab 148 | generatedText = generate(model,messages) 149 | ``` 150 | 151 | ```matlabTextOutput 152 | generatedText = "The sine of thirty degrees is 0.5." 153 | ``` 154 | # Input Arguments 155 | ### `messages` — Message history 156 | 157 | `messageHistory` object 158 | 159 | 160 | Message history, specified as a [`messageHistory`](messageHistory.md) object. 161 | 162 | ### completeOutput — Complete output 163 | 164 | structure array 165 | 166 | 167 | Complete output generated from a large language model using the [`generate`](generate.md) function, specified as a structure array. 168 | 169 | 170 | The type and name of the fields in the structure depend on the API, the model, whether you use function calls, and whether you stream the output. 171 | 172 | # Output Argument 173 | ### `updatedMessages` — Updated message history 174 | 175 | `messageHistory` object 176 | 177 | 178 | Updated message history, specified as a [`messageHistory`](messageHistory.md) object. 
179 | 180 | 181 | The updated message history includes a new structure array with these fields: 182 | 183 | - role —`"assistant"` 184 | - content — Set by the `content` input argument 185 | 186 | If the generated response includes a function call, then the updated message history also includes this field: 187 | 188 | - tool\_calls — `completeOutput.tool_calls` structure array 189 | # See Also 190 | 191 | [`generate`](generate.md) | [`messageHistory`](messageHistory.md) | [`openAIChat`](openAIChat.md) | [`ollamaChat`](ollamaChat.md) | [`azureChat`](azureChat.md) | [`addUserMessage`](addUserMessage.md) | [`addUserMessageWithImages`](addUserMessageWithImages.md) | [`addToolMessage`](addToolMessage.md) | [`addSystemMessage`](addSystemMessage.md) 192 | 193 | - [Analyze Text Data Using Parallel Function Calls with ChatGPT](../../examples/AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.md) 194 | 195 | *Copyright 2024 The MathWorks, Inc.* 196 | 197 | -------------------------------------------------------------------------------- /doc/functions/addSystemMessage.md: -------------------------------------------------------------------------------- 1 | 2 | # addSystemMessage 3 | 4 | Add system message to message history 5 | 6 | 7 | `updatedMessages = addSystemMessage(messages,name,content)` 8 | 9 | # Description 10 | 11 | You can use system messages to add example conversations to the message history. 12 | 13 | 14 | Use example conversations in system messages for *few\-shot prompting*. Few\-shot prompting is a form of prompt engineering. Provide examples of user input and expected model output to a large language model to prompt its future behavior. 15 | 16 | 17 | `updatedMessages = addSystemMessage(messages,name,content)` adds a system message to the `messageHistory` object `messages` and specifies the name of the speaker and the content of the message. 
18 | 19 | # Examples 20 | ## Generate Text from Example Conversation 21 | 22 | First, specify the OpenAI® API key as an environment variable and save it to a file called `".env"`. Next, load the environment file using the `loadenv` function. 23 | 24 | ```matlab 25 | loadenv(".env") 26 | ``` 27 | 28 | Connect to the OpenAI Chat Completion API. Use a system prompt to instruct the model. 29 | 30 | ```matlab 31 | model = openAIChat("You are a helpful assistants who judges whether two English words rhyme. You answer either yes or no."); 32 | ``` 33 | 34 | Initialize the message history. 35 | 36 | ```matlab 37 | messages = messageHistory; 38 | ``` 39 | 40 | Add example messages to the message history. When you pass this to the model, this example conversation further instructs the model on the output you want it to generate. 41 | 42 | ```matlab 43 | messages = addSystemMessage(messages,"example_user","House and mouse?"); 44 | messages = addSystemMessage(messages,"example_assistant","Yes"); 45 | messages = addSystemMessage(messages,"example_user","Thought and brought?"); 46 | messages = addSystemMessage(messages,"example_assistant","Yes"); 47 | messages = addSystemMessage(messages,"example_user","Tough and though?"); 48 | messages = addSystemMessage(messages,"example_assistant","No"); 49 | ``` 50 | 51 | Add a user message to the message history. When you pass this to the model, the system messages act as an extension of the system prompt. The user message acts as the prompt. 52 | 53 | ```matlab 54 | messages = addUserMessage(messages,"Love and move?"); 55 | ``` 56 | 57 | Generate a response from the message history. 58 | 59 | ```matlab 60 | generate(model,messages) 61 | ``` 62 | 63 | ```matlabTextOutput 64 | ans = "No" 65 | ``` 66 | # Input Arguments 67 | ### `messages` — Message history 68 | 69 | `messageHistory` object 70 | 71 | 72 | Message history, specified as a [`messageHistory`](messageHistory.md) object. 
73 | 74 | ### `name` — Name of the speaker 75 | 76 | string scalar | character vector 77 | 78 | 79 | Name of the speaker, specified as a string scalar or character vector. The name must be nonempty. 80 | 81 | 82 | To use system messages with an OpenAI API, the name must only contain letters, numbers, underscores (\_), and dashes (\-). 83 | 84 | 85 | **Example**: `"example_assistant"` 86 | 87 | ### `content` — Message content 88 | 89 | string scalar | character vector 90 | 91 | 92 | Message content, specified as a string scalar or character vector. The content must be nonempty. 93 | 94 | # Output Argument 95 | ### `updatedMessages` — Updated message history 96 | 97 | `messageHistory` object 98 | 99 | 100 | Updated message history, specified as a `messageHistory` object. The updated message history includes a new structure array with these fields: 101 | 102 | - role —`"system"` 103 | - name — Set by the `name` input argument 104 | - content — Set by the `content` input argument 105 | # See Also 106 | 107 | [`generate`](generate.md) | [`messageHistory`](messageHistory.md) | [`openAIChat`](openAIChat.md) | [`ollamaChat`](ollamaChat.md) | [`azureChat`](azureChat.md) | [`addUserMessage`](addUserMessage.md) | [`addUserMessageWithImages`](addUserMessageWithImages.md) | [`addToolMessage`](addToolMessage.md) | [`addResponseMessage`](addResponseMessage.md) 108 | 109 | 110 | *Copyright 2024 The MathWorks, Inc.* 111 | 112 | -------------------------------------------------------------------------------- /doc/functions/addToolMessage.md: -------------------------------------------------------------------------------- 1 | 2 | # addToolMessage 3 | 4 | Add tool message to message history 5 | 6 | 7 | `updatedMessages = addToolMessage(messages,toolCallID,name,content)` 8 | 9 | # Description 10 | 11 | Add tool messages to the message history to pass the return of a function call to a large language model. 
For more information on function calling, see [`openAIFunction`](openAIFunction.md). 12 | 13 | 14 | `updatedMessages = addToolMessage(messages,toolCallID,name,content)` adds a tool message to the `messageHistory` object `messages` and specifies the tool call ID, the name of the speaker, and the content of the message. 15 | 16 | # Examples 17 | ## Compute Sine Using OpenAI Function Call 18 | 19 | First, specify the OpenAI® API key as an environment variable and save it to a file called `".env"`. Next, load the environment file using the `loadenv` function. 20 | 21 | ```matlab 22 | loadenv(".env") 23 | ``` 24 | 25 | Create an `openAIFunction` object that represents the [`sind`](https://www.mathworks.com/help/matlab/ref/sind.html) function. The `sind` function has a single input argument, `x`, representing the input angle in degrees. 26 | 27 | ```matlab 28 | f = openAIFunction("sind","Sine of argument in degrees"); 29 | f = addParameter(f,"x",type="number",description="Angle in degrees"); 30 | ``` 31 | 32 | Connect to the OpenAI Chat Completion API. Pass the `openAIFunction` object `f` as an input argument. 33 | 34 | ```matlab 35 | model = openAIChat("You are a helpful assistant.",Tools=f); 36 | ``` 37 | 38 | Initialize the message history. Add a user message to the message history. 39 | 40 | ```matlab 41 | messages = messageHistory; 42 | messages = addUserMessage(messages,"What is the sine of thirty?"); 43 | ``` 44 | 45 | Generate a response based on the message history. 46 | 47 | ```matlab 48 | [~,completeOutput] = generate(model,messages) 49 | ``` 50 | 51 | ```matlabTextOutput 52 | completeOutput = struct with fields: 53 | role: 'assistant' 54 | content: [] 55 | tool_calls: [1x1 struct] 56 | refusal: [] 57 | 58 | ``` 59 | 60 | The model has not generated any text. Instead, it has created a function call, `completeOutput.tool_calls`. 61 | 62 | 63 | Add the response to the message history. 
64 | 65 | ```matlab 66 | messages = addResponseMessage(messages,completeOutput); 67 | ``` 68 | 69 | Extract the tool call ID and the name of the called function. 70 | 71 | ```matlab 72 | toolCallID = string(completeOutput.tool_calls.id) 73 | ``` 74 | 75 | ```matlabTextOutput 76 | toolCallID = "call_fnCZwyltX0jJmVweBTAgC4qI" 77 | ``` 78 | 79 | ```matlab 80 | functionCalled = string(completeOutput.tool_calls.function.name) 81 | ``` 82 | 83 | ```matlabTextOutput 84 | functionCalled = "sind" 85 | ``` 86 | 87 | Make sure that the model is calling the correct function. Even with only a single function, large language models can hallucinate function calls to fictitious functions. 88 | 89 | 90 | Extract the input argument values from the complete output using the [`jsondecode`](https://www.mathworks.com/help/matlab/ref/jsondecode.html) function. Compute the sine of the generated argument value and add the result to the message history using the `addToolMessage` function. 91 | 92 | ```matlab 93 | if functionCalled == "sind" 94 | args = jsondecode(completeOutput.tool_calls.function.arguments); 95 | result = sind(args.x) 96 | messages = addToolMessage(messages,toolCallID,functionCalled,"x="+result); 97 | end 98 | ``` 99 | 100 | ```matlabTextOutput 101 | result = 0.5000 102 | ``` 103 | 104 | Finally, generate a natural language response. 105 | 106 | ```matlab 107 | generatedText = generate(model,messages) 108 | ``` 109 | 110 | ```matlabTextOutput 111 | generatedText = "The sine of 30 degrees is 0.5." 112 | ``` 113 | # Input Arguments 114 | ### `messages` — Message history 115 | 116 | `messageHistory` object 117 | 118 | 119 | Message history, specified as a [`messageHistory`](messageHistory.md) object. 120 | 121 | ### `toolCallID` — Tool call ID 122 | 123 | string scalar | character vector 124 | 125 | 126 | Tool call ID, specified as a string scalar or character vector. 
127 | 128 | 129 | If an LLM creates a function call during generation, then the tool call ID is part of the complete output of the [`generate`](generate.md) function. 130 | 131 | ### `name` — Tool name 132 | 133 | string scalar | character vector 134 | 135 | 136 | Name of the tool, specified as a string scalar or character vector. The name must be nonempty and must only contain letters, numbers, underscores (\_), and dashes (\-). 137 | 138 | ### `content` — Message content 139 | 140 | string scalar | character vector 141 | 142 | 143 | Message content, specified as a string scalar or character vector. The content must be nonempty. 144 | 145 | # Output Argument 146 | ### `updatedMessages` — Updated message history 147 | 148 | `messageHistory` object 149 | 150 | 151 | Updated message history, specified as a [`messageHistory`](messageHistory.md) object. The updated message history includes a new structure array with these fields: 152 | 153 | - tool\_call\_id — Set by the `toolCallID` input argument 154 | - role —`"tool"` 155 | - name — Set by the `name` input argument 156 | - content — Set by the `content` input argument 157 | # See Also 158 | 159 | [`messageHistory`](messageHistory.md) | [`openAIFunction`](openAIFunction.md) | [`generate`](generate.md) | [`addResponseMessage`](addResponseMessage.md) 160 | 161 | - [Analyze Text Data Using Parallel Function Calls with ChatGPT](../../examples/AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.md) 162 | - [Analyze Text Data Using Parallel Function Calls with Ollama](../../examples/AnalyzeTextDataUsingParallelFunctionCallwithOllama.md) 163 | 164 | *Copyright 2024 The MathWorks, Inc.* 165 | 166 | -------------------------------------------------------------------------------- /doc/functions/addUserMessage.md: -------------------------------------------------------------------------------- 1 | 2 | # addUserMessage 3 | 4 | Add user message to message history 5 | 6 | 7 | `updatedMessages = addUserMessage(messages,content)` 8 
| 9 | # Description 10 | 11 | `updatedMessages = addUserMessage(messages,content)` adds a user message to the `messageHistory` object `messages` and specifies the content of the message. 12 | 13 | # Examples 14 | ## Add User Message to Message History 15 | 16 | Initialize the message history. 17 | 18 | ```matlab 19 | messages = messageHistory; 20 | ``` 21 | 22 | Add a user message to the message history. 23 | 24 | ```matlab 25 | messages = addUserMessage(messages,"Where is Natick located?"); 26 | messages.Messages{1} 27 | ``` 28 | 29 | ```matlabTextOutput 30 | ans = struct with fields: 31 | role: "user" 32 | content: "Where is Natick located?" 33 | 34 | ``` 35 | # Input Arguments 36 | ### `messages` — Message history 37 | 38 | `messageHistory` object 39 | 40 | 41 | Message history, specified as a [`messageHistory`](messageHistory.md) object. 42 | 43 | ### `content` — Message content 44 | 45 | string scalar | character vector 46 | 47 | 48 | Message content, specified as a string scalar or character vector. The content must be nonempty. 49 | 50 | # Output Argument 51 | ### `updatedMessages` — Updated message history 52 | 53 | `messageHistory` object 54 | 55 | 56 | Updated message history, specified as a `messageHistory` object. 
The updated message history includes a new structure array with these fields: 57 | 58 | - role —`"user"` 59 | - content — Set by the `content` input argument 60 | # See Also 61 | 62 | [`messageHistory`](messageHistory.md) | [`generate`](generate.md) | [`openAIChat`](openAIChat.md) | [`ollamaChat`](ollamaChat.md) | [`azureChat`](azureChat.md) | [`addSystemMessage`](addSystemMessage.md) | [`addUserMessageWithImages`](addUserMessageWithImages.md) | [`addToolMessage`](addToolMessage.md) | [`addResponseMessage`](addResponseMessage.md) 63 | 64 | 65 | *Copyright 2024 The MathWorks, Inc.* 66 | 67 | -------------------------------------------------------------------------------- /doc/functions/addUserMessageWithImages.md: -------------------------------------------------------------------------------- 1 | 2 | # addUserMessageWithImages 3 | 4 | Add user message with images to message history 5 | 6 | 7 | `updatedMessages = addUserMessageWithImages(messages,content,images)` 8 | 9 | 10 | `___ = addUserMessageWithImages(___,Name=Value)` 11 | 12 | # Description 13 | 14 | `updatedMessages = addUserMessageWithImages(messages,content,images)` adds a user message with images to the [`messageHistory`](messageHistory.md) object `messages`. 15 | 16 | 17 | `___ = addUserMessageWithImages(___,Detail=detail)` also specifies the image resolution to send to the model. 18 | 19 | # Examples 20 | ## Generate Image Description Using OpenAI Chat 21 | 22 | First, specify the OpenAI® API key as an environment variable and save it to a file called `".env"`. Next, load the environment file using the `loadenv` function. 23 | 24 | ```matlab 25 | loadenv(".env") 26 | ``` 27 | 28 | Create an `openAIChat` object. 29 | 30 | ```matlab 31 | model = openAIChat; 32 | ``` 33 | 34 | Initialize the message history. 35 | 36 | ```matlab 37 | messages = messageHistory; 38 | ``` 39 | 40 | View the image `"peppers.png"`, which is included in your MATLAB® installation. 
41 | 42 | ```matlab 43 | image = "peppers.png"; 44 | imshow(image) 45 | ``` 46 | 47 | ![An image showing a bunch of vegetables, mostly bell peppers.](images/addUserMessageWithImages1.png) 48 | 49 | Add a user message including the image to the message history. 50 | 51 | ```matlab 52 | messages = addUserMessageWithImages(messages,"Describe the image.",image); 53 | messages.Messages{1} 54 | ``` 55 | 56 | ```matlabTextOutput 57 | ans = struct with fields: 58 | role: "user" 59 | content: "Describe the image." 60 | images: "peppers.png" 61 | image_detail: "auto" 62 | 63 | ``` 64 | 65 | Generate text. 66 | 67 | ```matlab 68 | generate(model,messages,MaxNumTokens=30) 69 | ``` 70 | 71 | ```matlabTextOutput 72 | ans = "The image features a colorful assortment of various types of peppers and garlic. The peppers come in a range of colors, including red, yellow, green," 73 | ``` 74 | # Input Arguments 75 | ### `messages` — Message history 76 | 77 | `messageHistory` object 78 | 79 | 80 | Message history, specified as a [`messageHistory`](messageHistory.md) object. 81 | 82 | ### `content` — Message content 83 | 84 | string scalar | character vector 85 | 86 | 87 | Message content, specified as a string scalar or character vector. The content must be nonempty. 88 | 89 | ### `images` \- Images 90 | 91 | string scalar | string array 92 | 93 | 94 | Input image, specified as a PNG (\*.png) file. 95 | 96 | ## Name\-Value Arguments 97 | ### `Detail` — Image resolution 98 | 99 | `"auto"` (default) | `"high"` | `"low"` 100 | 101 | 102 | Specify the resolution of the image you pass to the large language model. 103 | 104 | 105 | If you set the image resolution to `"low"`, then the `generate` function sends a 512\-by\-512 version of the image to the LLM. 106 | 107 | 108 | If you set the image resolution to `"high"`, then the `generate` function also sends the full image in chunks of 512\-by\-512 tiles. This option can be more expensive. 
109 | 110 | 111 | If you set the image resolution to `"auto"`, then the resolution sent to the model depends on the image size. 112 | 113 | 114 | For more information, see [https://platform.openai.com/docs/guides/vision](https://platform.openai.com/docs/guides/vision). 115 | 116 | # Output Argument 117 | ### `updatedMessages` — Updated message history 118 | 119 | `messageHistory` object 120 | 121 | 122 | Updated message history, specified as a `messageHistory` object. The updated message history includes a new structure array with these fields: 123 | 124 | - role —`"user"` 125 | - content — Set by the `content` input argument 126 | - images — Set by the `images` input argument 127 | - image\_detail — Set by the `Detail` name\-value argument 128 | # See Also 129 | 130 | [`messageHistory`](messageHistory.md) | [`generate`](generate.md) 131 | 132 | 133 | *Copyright 2024 The MathWorks, Inc.* 134 | 135 | -------------------------------------------------------------------------------- /doc/functions/createVariation.md: -------------------------------------------------------------------------------- 1 | 2 | # createVariation 3 | 4 | Generate image variations using DALL·E 2 5 | 6 | 7 | `[imageVariation,httpResponse] = createVariation(model,image)` 8 | 9 | 10 | `___ = createVariation(___,Name=Value)` 11 | 12 | # Description 13 | 14 | `[imageVariation,httpResponse] = createVariation(model,image)` generates variations from an image using the OpenAI® image generation model DALL·E 2. 15 | 16 | 17 | `___ = createVariation(___,Name=Value)` specifies additional options using one or more name\-value arguments. 18 | 19 | # Examples 20 | ## Generate Image Variations Using DALL·E 2 21 | 22 | First, specify the OpenAI API key as an environment variable and save it to a file called `".env"`. Next, load the environment file using the `loadenv` function. 23 | 24 | ```matlab 25 | loadenv(".env") 26 | ``` 27 | 28 | Connect to the OpenAI Images API. By default, the model is DALL·E 2. 
29 | 30 | ```matlab 31 | model = openAIImages 32 | ``` 33 | 34 | ```matlabTextOutput 35 | model = 36 | openAIImages with properties: 37 | 38 | ModelName: "dall-e-2" 39 | TimeOut: 10 40 | 41 | ``` 42 | 43 | Generate and display an image based on a natural language prompt. 44 | 45 | ```matlab 46 | frogImage = generate(model,"A small frog wearing spectacles."); 47 | imshow(frogImage{1}) 48 | ``` 49 | 50 | ![An image of a frog wearing glasses.](images/createVariation1.png) 51 | 52 | Save the image to a PNG (\*.png) file. 53 | 54 | ```matlab 55 | imwrite(frogImage{1},"frog.png") 56 | ``` 57 | 58 | Create four different variations of the image. 59 | 60 | ```matlab 61 | [imageVariations,httpResponse] = createVariation(model,"frog.png",NumImages=4) 62 | ``` 63 | | |1| 64 | |:--:|:--:| 65 | |1|1024x1024x3 uint8| 66 | |2|1024x1024x3 uint8| 67 | |3|1024x1024x3 uint8| 68 | |4|1024x1024x3 uint8| 69 | 70 | ```matlabTextOutput 71 | httpResponse = 72 | ResponseMessage with properties: 73 | 74 | StatusLine: 'HTTP/1.1 200 OK' 75 | StatusCode: OK 76 | Header: [1x18 matlab.net.http.HeaderField] 77 | Body: [1x1 matlab.net.http.MessageBody] 78 | Completed: 0 79 | 80 | ``` 81 | 82 | Show the image variations in a montage. 83 | 84 | ```matlab 85 | montage(imageVariations) 86 | ``` 87 | 88 | ![Four images of frogs wearing glasses.](images/createVariation2.png) 89 | # Input Arguments 90 | ### `model` — Image generation model 91 | 92 | `openAIImages` object 93 | 94 | 95 | Image generation model, specified as an [`openAIImages`](openAIImages.md) object. The model name `model.ModelName` must be `"dall-e-2"`. 96 | 97 | ### `image` — Input image 98 | 99 | string scalar | character vector 100 | 101 | 102 | Input image, specified as a PNG (\*.png) file. The size of the image must be less than 4MB and the image must be square. 
103 | 104 | ## Name\-Value Arguments 105 | ### `NumImages` — Number of images to generate 106 | 107 | `1` (default) | positive integer less than or equal to `10` 108 | 109 | 110 | Specify the number of images to generate. 111 | 112 | ### `Size` — Size of generated image 113 | 114 | `"1024x1024"` (default) | `"256x256"` | `"512x512"` 115 | 116 | 117 | Size of the generated image in pixels. 118 | 119 | # Output Argument 120 | ### `imageVariation` — Generated image variation 121 | 122 | cell array of numerical matrices 123 | 124 | 125 | Images that the model generates, returned as a cell array with `NumImages` elements. Each element of the cell array contains a generated image specified as an RGB image of size `Size`. For example, if you specify `Size="1024x1024"`, then the generated images have size `1024x1024x3`. 126 | 127 | ### `httpResponse` — HTTP response message 128 | 129 | `matlab.net.http.ResponseMessage` object 130 | 131 | 132 | Response message returned by the server, specified as a [`matlab.net.http.ResponseMessage`](https://www.mathworks.com/help/matlab/ref/matlab.net.http.responsemessage-class.html) object. 133 | 134 | # See Also 135 | 136 | [`openAIImages`](openAIImages.md) | [`generate`](openAIImages.generate.md) | [`edit`](edit.md) 137 | 138 | - [Using DALL·E to Generate Images](../../examples/UsingDALLEToGenerateImages.md) 139 | - [Using DALL·E to Edit Images](../../examples/UsingDALLEToEditImages.md) 140 | 141 | *Copyright 2024 The MathWorks, Inc.* 142 | 143 | -------------------------------------------------------------------------------- /doc/functions/edit.md: -------------------------------------------------------------------------------- 1 | 2 | # edit 3 | 4 | Edit images using DALL·E 2 5 | 6 | 7 | `[editedImages,httpResponse] = edit(model,image,prompt)` 8 | 9 | 10 | `___ = edit(___,Name=Value)` 11 | 12 | # Description 13 | 14 | Edit images using the OpenAI® image generation model DALL·E 2. 
15 | 16 | 17 | Specify the area that you want to edit using a mask. The transparent areas of the mask, that is, anywhere that the mask is equal to zero, determine the parts of the source image that are edited. 18 | 19 | 20 | You can specify a mask using the `MaskImagePath` name\-value argument. If you do not specify a mask, then your input image must include a transparency layer. The function then uses the transparency as the mask. 21 | 22 | 23 | `[editedImages,httpResponse] = edit(model,image,prompt)` edits an existing image using DALL·E 2 and a natural language prompt. 24 | 25 | 26 | `___ = edit(___,Name=Value)` specifies additional options using one or more name\-value arguments. 27 | 28 | # Examples 29 | ## Edit Image Using DALL·E 2 30 | 31 | First, specify the OpenAI® API key as an environment variable and save it to a file called `".env"`. Next, load the environment file using the `loadenv` function. 32 | 33 | ```matlab 34 | loadenv(".env") 35 | ``` 36 | 37 | Connect to the OpenAI Images API. By default, the model is DALL·E 2. 38 | 39 | ```matlab 40 | model = openAIImages; 41 | ``` 42 | 43 | Load and display the source image. 44 | 45 | ```matlab 46 | imagePath = "llms-with-MATLAB/examples/images/bear.png"; 47 | figure 48 | imshow(imagePath) 49 | ``` 50 | 51 | ![An image of a bear climbing a tree.](images/edit1.png) 52 | 53 | Create a mask to cover the top left of the image. 54 | 55 | ```matlab 56 | mask = ones(1024,1024); 57 | mask(1:512,1:512) = 0; 58 | imwrite(mask,"topLeftMask.png"); 59 | ``` 60 | 61 | Edit the image. 
62 | 63 | ```matlab 64 | [editedImages,httpResponse] = edit(model,imagePath,"Add a big red apple to the tree.",MaskImagePath="topLeftMask.png") 65 | ``` 66 | 67 | ```matlabTextOutput 68 | editedImages = 1x1 cell array 69 | {1024x1024x3 uint8} 70 | 71 | httpResponse = 72 | ResponseMessage with properties: 73 | 74 | StatusLine: 'HTTP/1.1 200 OK' 75 | StatusCode: OK 76 | Header: [1x18 matlab.net.http.HeaderField] 77 | Body: [1x1 matlab.net.http.MessageBody] 78 | Completed: 0 79 | 80 | ``` 81 | 82 | Display the new image. 83 | 84 | ```matlab 85 | imshow(editedImages{1}) 86 | ``` 87 | 88 | ![A similar image of a bear climbing a tree, with a large apple in the top left corner.](images/edit2.png) 89 | # Input Arguments 90 | ### `model` — Image generation model 91 | 92 | `openAIImages` object 93 | 94 | 95 | Image generation model, specified as an [`openAIImages`](openAIImages.md) object. The model name `model.ModelName` must be `"dall-e-2"`. 96 | 97 | ### `image` — Input image 98 | 99 | string scalar | character vector 100 | 101 | 102 | Input image, specified as a PNG (\*.png) file. The size of the image must be less than 4MB and the image must be square. 103 | 104 | 105 | If you do not specify an editing mask, then the image must include a transparency dimension. The model will then use the transparency as the mask. 106 | 107 | 108 | **Example**: `"myImageRepository/testImage.png"` 109 | 110 | ### `prompt` — User prompt 111 | 112 | character vector | string scalar 113 | 114 | 115 | Natural language prompt instructing the model what to do. 116 | 117 | 118 | The user prompt must include fewer than or equal to 1000 characters. 119 | 120 | 121 | **Example:** `"Please add an ice cream sundae to the picture."` 122 | 123 | ## Name\-Value Arguments 124 | ### `MaskImagePath` — Path to mask 125 | 126 | string scalar | character vector 127 | 128 | 129 | Mask, specified as a gray scale PNG (\*.png) file. 
The size of the mask must be less than 4MB and the mask must have the same dimensions as the input image. 130 | 131 | 132 | The transparent areas of the mask, that is, anywhere that the mask is equal to zero, determine the parts of the source image to edit. 133 | 134 | 135 | If you do not specify a mask, then your input image must include a transparency dimension. The function then uses the transparency as the mask. 136 | 137 | ### `NumImages` — Number of images to generate 138 | 139 | `1` (default) | positive integer less than or equal to 10 140 | 141 | 142 | Specify the number of images to generate. 143 | 144 | ### `Size` — Size of generated image 145 | 146 | `"1024x1024"` (default) | `"256x256"` | `"512x512"` 147 | 148 | 149 | Size of the generated image in pixels. 150 | 151 | # Output Argument 152 | ### `editedImages` — Edited images 153 | 154 | cell array of numerical matrices 155 | 156 | 157 | Images that the model generates, returned as a cell array with `NumImages` elements. Each element of the cell array contains a generated image specified as an RGB image of the same size as the input image. 158 | 159 | ### `httpResponse` — HTTP response message 160 | 161 | `matlab.net.http.ResponseMessage` object 162 | 163 | 164 | Response message returned by the server, specified as a [`matlab.net.http.ResponseMessage`](https://www.mathworks.com/help/matlab/ref/matlab.net.http.responsemessage-class.html) object. 
165 | 166 | # See Also 167 | 168 | [`openAIImages`](openAIImages.md) | [`generate`](openAIImages.generate.md) | [`createVariation`](createVariation.md) 169 | 170 | - [Using DALL·E to Generate Images](../../examples/UsingDALLEToGenerateImages.md) 171 | - [Using DALL·E to Edit Images](../../examples/UsingDALLEToEditImages.md) 172 | 173 | *Copyright 2024 The MathWorks, Inc.* 174 | 175 | -------------------------------------------------------------------------------- /doc/functions/images/addUserMessageWithImages1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/addUserMessageWithImages1.png -------------------------------------------------------------------------------- /doc/functions/images/azureEnvExample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/azureEnvExample.png -------------------------------------------------------------------------------- /doc/functions/images/boardwalk.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/boardwalk.png -------------------------------------------------------------------------------- /doc/functions/images/createVariation1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/createVariation1.png -------------------------------------------------------------------------------- /doc/functions/images/createVariation2.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/createVariation2.png -------------------------------------------------------------------------------- /doc/functions/images/edit1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/edit1.png -------------------------------------------------------------------------------- /doc/functions/images/edit2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/edit2.png -------------------------------------------------------------------------------- /doc/functions/images/envExample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/envExample.png -------------------------------------------------------------------------------- /doc/functions/images/octopus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/octopus.png -------------------------------------------------------------------------------- /doc/functions/images/openAIFunction1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/openAIFunction1.png 
-------------------------------------------------------------------------------- /doc/functions/images/openAIFunction2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/openAIFunction2.png -------------------------------------------------------------------------------- /doc/functions/images/openAIImages.generate1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/openAIImages.generate1.png -------------------------------------------------------------------------------- /doc/functions/images/openAIImages1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/doc/functions/images/openAIImages1.png -------------------------------------------------------------------------------- /doc/functions/openAIImages.generate.md: -------------------------------------------------------------------------------- 1 | 2 | # generate 3 | 4 | Generate image using OpenAI® image generation API 5 | 6 | 7 | `[images,httpResponse] = generate(model,prompt)` 8 | 9 | 10 | `___ = generate(___,Name=Value)` 11 | 12 | # Description 13 | 14 | `[images,httpResponse] = generate(model,prompt)` generates images from an OpenAI image generation model given a natural language prompt. 15 | 16 | 17 | `___ = generate(___,Name=Value)` specifies additional options using one or more name\-value arguments. 18 | 19 | # Examples 20 | ## Generate Image Using DALL·E 2 21 | 22 | First, specify the OpenAI API key as an environment variable and save it to a file called `".env"`. Next, load the environment file using the `loadenv` function. 
23 | 24 | ```matlab 25 | loadenv(".env") 26 | ``` 27 | 28 | Connect to the OpenAI Images API. By default, the model is DALL·E 2. 29 | 30 | ```matlab 31 | model = openAIImages 32 | ``` 33 | 34 | ```matlabTextOutput 35 | model = 36 | openAIImages with properties: 37 | 38 | ModelName: "dall-e-2" 39 | TimeOut: 10 40 | 41 | ``` 42 | 43 | Generate and display an image based on a natural language prompt. 44 | 45 | ```matlab 46 | catImage = generate(model,"An image of a cat confused by a complicated knitting pattern."); 47 | imshow(catImage{1}) 48 | ``` 49 | 50 | ![An image of a cartoon cat thinking about the tangle of red wool in front of it.](images/openAIImages.generate1.png) 51 | # Input Arguments 52 | ### `model` — Image generation model 53 | 54 | `openAIImages` object 55 | 56 | 57 | Image generation model, specified as an [`openAIImages`](openAIImages.md) object. 58 | 59 | ### `prompt` — User prompt 60 | 61 | character vector | string scalar 62 | 63 | 64 | Natural language prompt instructing the model what to do. 65 | 66 | 67 | **Example:** `"Please draw a frog wearing spectacles."` 68 | 69 | ## Name\-Value Arguments 70 | ### `NumImages` — Number of images to generate 71 | 72 | `1` (default) | positive integer 73 | 74 | 75 | Specify the number of images to generate. 76 | 77 | 78 | Generating more than one image at once is only supported for DALL·E 2. 79 | 80 | ### `Size` — Size of generated image 81 | 82 | `"1024x1024"` (default) | `"256x256"` | `"512x512"` | `"1792x1024"` | `"1024x1792"` 83 | 84 | 85 | Size of the generated image in pixels. 86 | 87 | 88 | Sizes supported for DALL·E 2: 89 | 90 | - `"1024x1024"` 91 | - `"256x256"` 92 | - `"512x512"` 93 | 94 | Sizes supported for DALL·E 3: 95 | 96 | - `"1024x1024"` 97 | - `"1024x1792"` 98 | - `"1792x1024"` 99 | ### `Quality` — Quality of generated image 100 | 101 | `"standard"` (default) | `"hd"` 102 | 103 | 104 | Specify the OpenAI `"quality"` parameter. This option is only supported for DALL·E 3. 
105 | 106 | 107 | If you specify the quality to be `"hd"`, then the cost per generated image increases. 108 | 109 | 110 | For more information on the differences between standard and HD quality, see [https://cookbook.openai.com/articles/what\_is\_new\_with\_dalle\_3](https://cookbook.openai.com/articles/what_is_new_with_dalle_3). 111 | 112 | ### `Style` — Style of generated image 113 | 114 | `"vivid"` (default) | `"natural"` 115 | 116 | 117 | Specify the OpenAI `"style"` parameter. This option is only supported for DALL·E 3. 118 | 119 | 120 | For more information on the differences between vivid and natural style, see [https://cookbook.openai.com/articles/what\_is\_new\_with\_dalle\_3](https://cookbook.openai.com/articles/what_is_new_with_dalle_3). 121 | 122 | # Output Argument 123 | ### `images` — Generated images 124 | 125 | cell array of numerical matrices 126 | 127 | 128 | Images that the model generates, returned as a cell array with `NumImages` elements. Each element of the cell array contains a generated image specified as an RGB image of size `Size`. For example, if you specify `Size="1024x1024"`, then the generated images have size `1024x1024x3`. 129 | 130 | ### `httpResponse` — HTTP response message 131 | 132 | `matlab.net.http.ResponseMessage` object 133 | 134 | 135 | Response message returned by the server, specified as a [`matlab.net.http.ResponseMessage`](https://www.mathworks.com/help/matlab/ref/matlab.net.http.responsemessage-class.html) object. 
136 | 137 | # See Also 138 | 139 | [`openAIImages`](openAIImages.md) | [`edit`](edit.md) | [`createVariation`](createVariation.md) 140 | 141 | - [Using DALL·E to Generate Images](../../examples/UsingDALLEToGenerateImages.md) 142 | - [Using DALL·E to Edit Images](../../examples/UsingDALLEToEditImages.md) 143 | 144 | *Copyright 2024 The MathWorks, Inc.* 145 | 146 | -------------------------------------------------------------------------------- /doc/functions/openAIImages.md: -------------------------------------------------------------------------------- 1 | 2 | # openAIImages 3 | 4 | Connect to OpenAI® Image Generation API from MATLAB® 5 | 6 | # Creation 7 | ## Syntax 8 | 9 | `model = openAIImages` 10 | 11 | 12 | `model = openAIImages(___,APIKey=key)` 13 | 14 | 15 | `model = openAIImages(___,Name=Value)` 16 | 17 | ## Description 18 | 19 | Connect to the OpenAI Image Generation API to generate images using models developed by OpenAI. 20 | 21 | 22 | To connect to the OpenAI API, you need a valid API key. For information on how to obtain an API key, see [https://platform.openai.com/docs/quickstart](https://platform.openai.com/docs/quickstart). 23 | 24 | 25 | `model = openAIImages` creates an `openAIImages` object. Connecting to the OpenAI API requires a valid API key. Either set the environment variable `OPENAI_API_KEY` or specify the `APIKey` name\-value argument. 26 | 27 | 28 | `model = openAIImages(___,APIKey=key)` uses the specified API key. 29 | 30 | 31 | `model = openAIImages(___,Name=Value)` specifies additional options using one or more name\-value arguments. 32 | 33 | # Input Arguments 34 | ## Name\-Value Arguments 35 | ### `APIKey` — OpenAI API key 36 | 37 | character vector | string scalar 38 | 39 | 40 | OpenAI API key to access OpenAI APIs such as DALL·E. 41 | 42 | 43 | Instead of using the `APIKey` name\-value argument, you can also set the environment variable OPENAI\_API\_KEY. For more information, see [OpenAI API](../OpenAI.md). 
44 | 45 | # Properties Settable at Construction 46 | 47 | Optionally specify these properties at construction using name\-value arguments. Specify `PropertyName1=PropertyValue1,...,PropertyNameN=PropertyValueN`, where `PropertyName` is the property name and `PropertyValue` is the corresponding value. 48 | 49 | ### `ModelName` — Model name 50 | 51 | `"dall-e-2"` (default) | `"dall-e-3"` 52 | 53 | 54 | After construction, this property is read\-only. 55 | 56 | 57 | Name of the OpenAI model to use for image generation. To use DALL·E 2, set `ModelName` to `"dall-e-2"`. To use DALL·E 3, set `ModelName` to `"dall-e-3"`. 58 | 59 | ### `TimeOut` — Connection timeout in seconds 60 | 61 | `10` (default) | nonnegative numeric scalar 62 | 63 | 64 | After construction, this property is read\-only. 65 | 66 | 67 | If the OpenAI server does not respond within the timeout, then the function throws an error. 68 | 69 | # Object Functions 70 | 71 | [`generate`](openAIImages.generate.md) — Generate image using OpenAI image generation API 72 | 73 | 74 | [`edit`](edit.md) — Edit images using DALL·E 2 75 | 76 | 77 | [`createVariation`](createVariation.md) — Generate image variations using DALL·E 2 78 | 79 | # Examples 80 | ## Generate Image Using DALL·E 2 81 | 82 | First, specify the OpenAI API key as an environment variable and save it to a file called `".env"`. Next, load the environment file using the `loadenv` function. 83 | 84 | ```matlab 85 | loadenv(".env") 86 | ``` 87 | 88 | Connect to the OpenAI Images API. By default, the model is DALL·E 2. 89 | 90 | ```matlab 91 | model = openAIImages 92 | ``` 93 | 94 | ```matlabTextOutput 95 | model = 96 | openAIImages with properties: 97 | 98 | ModelName: "dall-e-2" 99 | TimeOut: 10 100 | 101 | ``` 102 | 103 | Generate and display an image based on a natural language prompt. 
104 | 105 | ```matlab 106 | catImage = generate(model,"An image of a cat confused by a complicated knitting pattern."); 107 | imshow(catImage{1}) 108 | ``` 109 | 110 | ![An image of a sad cartoon cat pondering a skein of red wool.](images/openAIImages1.png) 111 | # See Also 112 | 113 | [`openAIChat`](openAIChat.md) | [`generate`](openAIImages.generate.md) | [`edit`](edit.md) | [`createVariation`](createVariation.md) 114 | 115 | - [Using DALL·E to Generate Images](../../examples/UsingDALLEToGenerateImages.md) 116 | - [Using DALL·E to Edit Images](../../examples/UsingDALLEToEditImages.md) 117 | 118 | *Copyright 2024 The MathWorks, Inc.* 119 | 120 | -------------------------------------------------------------------------------- /doc/functions/removeMessage.md: -------------------------------------------------------------------------------- 1 | 2 | # removeMessage 3 | 4 | Remove message from message history 5 | 6 | 7 | `updatedMessages = removeMessage(messages,messageIndex)` 8 | 9 | # Description 10 | 11 | `updatedMessages = removeMessage(messages,messageIndex)` removes an existing message from the specified position in the [`messageHistory`](messageHistory.md) object `messages`. 12 | 13 | # Examples 14 | ## Add Response Message to Message History 15 | 16 | Initialize the message history. 17 | 18 | ```matlab 19 | messages = messageHistory; 20 | ``` 21 | 22 | Add a user message to the message history. 23 | 24 | ```matlab 25 | messages = addUserMessage(messages,"Why is a raven like a writing desk?"); 26 | ``` 27 | 28 | Remove the message from the message history. 29 | 30 | ```matlab 31 | messages = removeMessage(messages,1) 32 | ``` 33 | 34 | ```matlabTextOutput 35 | messages = 36 | messageHistory with properties: 37 | 38 | Messages: {1x0 cell} 39 | 40 | ``` 41 | # Input Arguments 42 | ### `messages` — Message history 43 | 44 | `messageHistory` object 45 | 46 | 47 | Message history, specified as a [`messageHistory`](messageHistory.md) object. 
48 | 49 | ### `messageIndex` — Message index 50 | 51 | positive integer 52 | 53 | 54 | Index of the message to remove, specified as a positive integer. 55 | 56 | # Output Argument 57 | ### `updatedMessages` — Updated message history 58 | 59 | `messageHistory` object 60 | 61 | 62 | Updated message history, specified as a [`messageHistory`](messageHistory.md) object. 63 | 64 | # See Also 65 | 66 | [`messageHistory`](messageHistory.md) | [`addSystemMessage`](addSystemMessage.md) | [`addUserMessage`](addUserMessage.md) | [`addToolMessage`](addToolMessage.md) | [`addResponseMessage`](addResponseMessage.md) | [`addUserMessageWithImages`](addUserMessageWithImages.md) 67 | 68 | - [Create Simple Chat Bot](../../examples/CreateSimpleChatBot.md) 69 | - [Create Simple Ollama Chat Bot](../../examples/CreateSimpleOllamaChatBot.md) 70 | 71 | *Copyright 2024 The MathWorks, Inc.* 72 | 73 | -------------------------------------------------------------------------------- /examples/AnalyzeScientificPapersUsingFunctionCalls.md: -------------------------------------------------------------------------------- 1 | 2 | # Analyze Scientific Papers Using ChatGPT™ Function Calls 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/AnalyzeScientificPapersUsingFunctionCalls.mlx](mlx-scripts/AnalyzeScientificPapersUsingFunctionCalls.mlx) 5 | 6 | This example shows how to extract recent scientific papers from ArXiv, summarize them using ChatGPT, and write the results to a CSV file using the `openAIFunction` function. 7 | 8 | - The example contains three steps: 9 | - Define a custom function for ChatGPT to use to process its input and output. 10 | - Extract papers from ArXiv. 11 | - Use ChatGPT to assess whether a paper is relevant to your query, and to add an entry to the results table if so. 12 | 13 | To run this example, you need a valid API key from a paid OpenAI™ API account. 
14 | 15 | ```matlab 16 | loadenv(".env") 17 | addpath('../..') 18 | ``` 19 | # Initialize OpenAI API Function and Chat 20 | 21 | Use `openAIFunction` to define functions that the model will be able to request calls to. 22 | 23 | 24 | Set up the function to store paper details and initiate a chat with the OpenAI API with a defined role as a scientific paper expert. 25 | 26 | 27 | Define the function that you want the model to have access to. In this example, the function used is `writePaperDetails`. 28 | 29 | ```matlab 30 | f = openAIFunction("writePaperDetails", "Function to write paper details to a table."); 31 | f = addParameter(f, "name", type="string", description="Name of the paper."); 32 | f = addParameter(f, "url", type="string", description="URL containing the paper."); 33 | f = addParameter(f, "explanation", type="string", description="Explanation on why the paper is related to the given topic."); 34 | 35 | paperVerifier = openAIChat("You are an expert in filtering scientific papers. " + ... 36 | "Given a certain topic, you are able to decide if the paper" + ... 37 | " fits the given topic or not."); 38 | 39 | paperExtractor = openAIChat("You are an expert in extracting information from a paper.", Tools=f); 40 | 41 | function writePaperDetails(name, url, desc) 42 | filename = "papers_to_read.csv"; 43 | T = table(name, url, desc, VariableNames=["Name", "URL", "Description"]); 44 | writetable(T, filename, WriteMode="append"); 45 | end 46 | ``` 47 | # Extract Papers From ArXiv 48 | 49 | Specify the category of interest, the date range for the query, and the maximum number of results to retrieve from the ArXiv API. 50 | 51 | ```matlab 52 | category = "cs.CL"; 53 | endDate = datetime("today", "Format","uuuuMMdd"); 54 | startDate = datetime("today", "Format","uuuuMMdd") - 5; 55 | maxResults = 40; 56 | urlQuery = "https://export.arxiv.org/api/query?search_query=" + ... 57 | "cat:" + category + ... 
58 | "&submittedDate=["+string(startDate)+"+TO+"+string(endDate)+"]"+... 59 | "&max_results=" + maxResults + ... 60 | "&sortBy=submittedDate&sortOrder=descending"; 61 | 62 | options = weboptions('Timeout',160); 63 | code = webread(urlQuery,options); 64 | ``` 65 | 66 | Extract individual paper entries from the API response and use ChatGPT to determine whether each paper is related to the specified topic. 67 | 68 | 69 | ChatGPT will parse the XML file, so we only need to extract the relevant entries. 70 | 71 | ```matlab 72 | entries = extractBetween(code, '', ''); 73 | ``` 74 | # Write Relevant Information to Table 75 | 76 | Create empty file and determine the topic of interest. 77 | 78 | ```matlab 79 | filename = "papers_to_read.csv"; 80 | T = table([], [], [], VariableNames=["Name", "URL", "Description"]); 81 | writetable(T, filename); 82 | 83 | topic = "Large Language Models"; 84 | ``` 85 | 86 | Loop over the entries and see if they are relevant to the topic of interest. 87 | 88 | ```matlab 89 | for i = 1:length(entries) 90 | prompt = "Given the following paper:" + newline +... 91 | string(entries{i})+ newline +... 92 | "Is it related to the topic: "+ topic +"?" + ... 93 | " Answer 'yes' or 'no'."; 94 | [text, response] = generate(paperVerifier, prompt); 95 | 96 | ``` 97 | 98 | If the model classifies this entry as relevant, then it tries to request a function call. 99 | 100 | ```matlab 101 | if contains("yes", text, IgnoreCase=true) 102 | prompt = "Given the following paper:" + newline + string(entries{i})+ newline +... 103 | "Given the topic: "+ topic + newline + "Write the details to a table."; 104 | [text, response] = generate(paperExtractor, prompt); 105 | ``` 106 | 107 | If `function_call` is part of the response, it means the model is requesting a function call. The function call request should contain the needed arguments to call the function specified at the end of this example and defined with `openAIFunction`. 
108 | 109 | ```matlab 110 | if isfield(response, "tool_calls") 111 | funCall = response.tool_calls; 112 | functionCallAttempt(funCall); 113 | end 114 | end 115 | end 116 | ``` 117 | 118 | Read the generated file. 119 | 120 | ```matlab 121 | data = readtable("papers_to_read.csv", Delimiter=",") 122 | ``` 123 | # Helper Function 124 | 125 | This function handles function call attempts from the model, checking the function name and arguments before calling the appropriate function to store the paper details. 126 | 127 | ```matlab 128 | function functionCallAttempt(funCall) 129 | ``` 130 | 131 | The model can sometimes hallucinate function names, so you need to ensure that it's suggesting the correct name. 132 | 133 | ```matlab 134 | if funCall.function.name == "writePaperDetails" 135 | try 136 | ``` 137 | 138 | The model can sometimes return improperly formed JSON, which needs to be handled. 139 | 140 | ```matlab 141 | funArgs = jsondecode(funCall.function.arguments); 142 | catch ME 143 | error("Model returned improperly formed JSON."); 144 | end 145 | ``` 146 | 147 | The model can hallucinate arguments. The code needs to ensure the arguments have been defined before calling the function. 
148 | 149 | ```matlab 150 | if isfield(funArgs, "name") && isfield(funArgs, "url") && isfield(funArgs,"explanation") 151 | writePaperDetails(string(funArgs.name), string(funArgs.url), string(funArgs.explanation)); 152 | end 153 | end 154 | end 155 | ``` 156 | 157 | *Copyright 2023\-2024 The MathWorks, Inc.* 158 | 159 | -------------------------------------------------------------------------------- /examples/AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.md: -------------------------------------------------------------------------------- 1 | 2 | # Analyze Sentiment in Text Using ChatGPT™ and Structured Output 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.mlx](mlx-scripts/AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.mlx) 5 | 6 | This example shows how to use ChatGPT for sentiment analysis and output the results in a desired format. 7 | 8 | 9 | To run this example, you need a valid API key from a paid OpenAI™ API account. 10 | 11 | ```matlab 12 | loadenv(".env") 13 | addpath('../..') 14 | ``` 15 | 16 | Define some text to analyze the sentiment. 17 | 18 | ```matlab 19 | inputText = ["I can't stand homework."; 20 | "This sucks. I'm bored."; 21 | "I can't wait for Halloween!!!"; 22 | "I am neither for nor against the idea."; 23 | "My cat is adorable ❤️❤️"; 24 | "I hate chocolate"; 25 | "More work. Great."; 26 | "More work. Great!"]; 27 | ``` 28 | 29 | Define the system prompt. 30 | 31 | ```matlab 32 | systemPrompt = "You are an AI designed to analyze the sentiment of the provided text and " + ... 33 | "Determine whether the sentiment is positive, negative, or neutral " + ... 
34 | "and provide a confidence score between 0 and 1."; 35 | prompt = "Analyze the sentiment of the provided text."; 36 | ``` 37 | 38 | Define the expected output format by providing an example – when supplied with a struct as the `ResponseFormat`, `generate` will return a struct with the same field names and data types. Use a [categorical](https://www.mathworks.com/help/matlab/categorical-arrays.html) to restrict the values that can be returned to the list `["positive","negative","neutral"]`. 39 | 40 | ```matlab 41 | prototype = struct(... 42 | "sentiment", categorical("positive",["positive","negative","neutral"]),... 43 | "confidence", 0.2) 44 | ``` 45 | 46 | ```matlabTextOutput 47 | prototype = struct with fields: 48 | sentiment: positive 49 | confidence: 0.2000 50 | 51 | ``` 52 | 53 | Create a chat object and set `ResponseFormat` to `prototype`. 54 | 55 | ```matlab 56 | chat = openAIChat(systemPrompt, ResponseFormat=prototype); 57 | ``` 58 | 59 | Concatenate the prompt and input text and generate an answer with the model. 60 | 61 | ```matlab 62 | scores = []; 63 | for i = 1:numel(inputText) 64 | ``` 65 | 66 | Generate a response from the message. 67 | 68 | ```matlab 69 | thisResponse = generate(chat,prompt + newline + newline + inputText(i)); 70 | scores = [scores; thisResponse]; %#ok 71 | end 72 | ``` 73 | 74 | Extract the content from the output structure array `scores`. 75 | 76 | ```matlab 77 | T = struct2table(scores); 78 | T.text = inputText; 79 | T = movevars(T,"text","Before","sentiment") 80 | ``` 81 | | |text|sentiment|confidence| 82 | |:--:|:--:|:--:|:--:| 83 | |1|"I can't stand homework."|negative|0.9500| 84 | |2|"This sucks. I'm bored."|negative|0.9500| 85 | |3|"I can't wait for Halloween!!!"|positive|0.9500| 86 | |4|"I am neither for nor against the idea."|neutral|0.9500| 87 | |5|"My cat is adorable ❤️❤️"|positive|0.9500| 88 | |6|"I hate chocolate"|negative|0.9500| 89 | |7|"More work. Great."|negative|0.8500| 90 | |8|"More work. 
Great!"|positive|0.9000| 91 | 92 | 93 | *Copyright 2024 The MathWorks, Inc.* 94 | 95 | -------------------------------------------------------------------------------- /examples/CreateSimpleChatBot.md: -------------------------------------------------------------------------------- 1 | 2 | # Create Simple ChatBot 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/CreateSimpleChatBot.mlx](mlx-scripts/CreateSimpleChatBot.mlx) 5 | 6 | This example shows how to create a simple chatbot using the `openAIChat` and `messageHistory` functions. 7 | 8 | 9 | When you run this example, an interactive AI chat starts in the MATLAB® Command Window. To leave the chat, type "end" or press **Ctrl+C**. 10 | 11 | - This example includes three steps: 12 | - Define model parameters, such as the maximum word count, and a stop word. 13 | - Create an openAIChat object and set up a meta prompt. 14 | - Set up the chat loop. 15 | 16 | To run this example, you need a valid API key from a paid OpenAI™ API account. 17 | 18 | # Setup Model 19 | 20 | Set the maximum allowable number of words per chat session and define the keyword that, when entered by the user, ends the chat session. This example uses the model o1. 21 | 22 | ```matlab 23 | wordLimit = 2000; 24 | stopWord = "end"; 25 | modelName = "o1"; 26 | ``` 27 | 28 | Create an instance of `openAIChat` to perform the chat and `messageHistory` to store the conversation history`.` 29 | 30 | ```matlab 31 | chat = openAIChat("You are a helpful assistant. You reply in a very concise way, keeping answers limited to short sentences.", ModelName=modelName); 32 | messages = messageHistory; 33 | ``` 34 | # Chat loop 35 | 36 | Start the chat and keep it going until it sees the word in `stopWord`. 
37 | 38 | ```matlab 39 | totalWords = 0; 40 | messagesSizes = []; 41 | ``` 42 | 43 | The main loop continues indefinitely until you input the stop word or press **Ctrl+C.** 44 | 45 | ```matlab 46 | while true 47 | query = input("User: ", "s"); 48 | query = string(query); 49 | disp("User: " + query) 50 | ``` 51 | 52 | If you input the stop word, display a farewell message and exit the loop. 53 | 54 | ```matlab 55 | if query == stopWord 56 | disp("AI: Closing the chat. Have a great day!") 57 | break; 58 | end 59 | 60 | numWordsQuery = countNumWords(query); 61 | ``` 62 | 63 | If the query exceeds the word limit, display an error message and halt execution. 64 | 65 | ```matlab 66 | if numWordsQuery>wordLimit 67 | error("Your query should have less than 2000 words. You query had " + numWordsQuery + " words") 68 | end 69 | ``` 70 | 71 | Keep track of the size of each message and the total number of words used so far. 72 | 73 | ```matlab 74 | messagesSizes = [messagesSizes; numWordsQuery]; %#ok 75 | totalWords = totalWords + numWordsQuery; 76 | ``` 77 | 78 | If the total word count exceeds the limit, remove messages from the start of the session until it no longer does. 79 | 80 | ```matlab 81 | while totalWords > wordLimit 82 | totalWords = totalWords - messagesSizes(1); 83 | messages = removeMessage(messages, 1); 84 | messagesSizes(1) = []; 85 | end 86 | ``` 87 | 88 | Add the new message to the session and generate a new response. 89 | 90 | ```matlab 91 | messages = addUserMessage(messages, query); 92 | [text, response] = generate(chat, messages); 93 | 94 | disp("AI: " + text) 95 | ``` 96 | 97 | Count the number of words in the response and update the total word count. 98 | 99 | ```matlab 100 | numWordsResponse = countNumWords(text); 101 | messagesSizes = [messagesSizes; numWordsResponse]; %#ok 102 | totalWords = totalWords + numWordsResponse; 103 | ``` 104 | 105 | Add the response to the session. 
106 | 107 | ```matlab 108 | messages = addResponseMessage(messages, response); 109 | end 110 | ``` 111 | 112 | ```matlabTextOutput 113 | User: Hello, how much do you know about physics? 114 | AI: I have broad knowledge spanning classical, relativistic, and quantum physics. 115 | User: What is torque? 116 | AI: Torque is a measure of how a force causes rotational motion around an axis. 117 | User: What is force? 118 | AI: Force is an interaction that changes an object's state of motion. 119 | User: What is motion? 120 | AI: Motion is the change in an object's position over time. 121 | User: What is time? 122 | AI: Time is the dimension in which events occur sequentially from past to future. 123 | User: end 124 | AI: Closing the chat. Have a great day! 125 | ``` 126 | # `countNumWords` function 127 | 128 | Function to count the number of words in a text string 129 | 130 | ```matlab 131 | function numWords = countNumWords(text) 132 | numWords = doclength(tokenizedDocument(text)); 133 | end 134 | ``` 135 | 136 | *Copyright 2023\-2025 The MathWorks, Inc.* 137 | 138 | -------------------------------------------------------------------------------- /examples/DescribeImagesUsingChatGPT.md: -------------------------------------------------------------------------------- 1 | 2 | # Describe Images Using ChatGPT™ 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/DescribeImagesUsingChatGPT.mlx](mlx-scripts/DescribeImagesUsingChatGPT.mlx) 5 | 6 | This example shows how to generate image descriptions using the addUserMessageWithImages function. To run this example, you need a valid API key from a paid OpenAI™ API account, and a history of successful payment. 7 | 8 | ```matlab 9 | loadenv(".env") 10 | addpath('../..') 11 | ``` 12 | # Load and Display Image Data 13 | 14 | Load the sample image from Wikipedia. Use the `imread` function to read images from URLs or filenames. 
15 | 16 | ```matlab 17 | image_url = 'https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg'; 18 | im = imread(image_url); 19 | imshow(im) 20 | ``` 21 | 22 | ![figure_0.png](DescribeImagesUsingChatGPT_media/figure_0.png) 23 | # Generate Image Descriptions 24 | 25 | Ask questions about the image with the URL. 26 | 27 | ```matlab 28 | chat = openAIChat("You are an AI assistant."); 29 | ``` 30 | 31 | Create a message and pass the image URL along with the prompt. 32 | 33 | ```matlab 34 | messages = messageHistory; 35 | messages = addUserMessageWithImages(messages,"What is in the image?", string(image_url)); 36 | ``` 37 | 38 | Generate a response. 39 | 40 | ```matlab 41 | [txt,~,response] = generate(chat,messages); 42 | if response.StatusCode == "OK" 43 | wrappedText = wrapText(txt) 44 | else 45 | response.Body.Data.error 46 | end 47 | ``` 48 | 49 | ```matlabTextOutput 50 | wrappedText = 51 | "The image depicts a scenic outdoor landscape featuring a wooden pathway or 52 | boardwalk extending through a lush green field. The field is filled with tall 53 | grass and bordered by greenery, including trees and shrubbery. Above, the sky 54 | is bright and cloudy, creating a serene and tranquil atmosphere." 
55 | 56 | ``` 57 | # Helper function 58 | ```matlab 59 | function wrappedText = wrapText(text) 60 | s = textwrap(text,80); 61 | wrappedText = string(join(s,newline)); 62 | end 63 | ``` 64 | 65 | *Copyright 2024 The MathWorks, Inc.* 66 | 67 | -------------------------------------------------------------------------------- /examples/DescribeImagesUsingChatGPT_media/figure_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/DescribeImagesUsingChatGPT_media/figure_0.png -------------------------------------------------------------------------------- /examples/InformationRetrievalUsingOpenAIDocumentEmbedding.md: -------------------------------------------------------------------------------- 1 | 2 | # Information Retrieval Using OpenAI™ Document Embedding 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/InformationRetrievalUsingOpenAIDocumentEmbedding.mlx](mlx-scripts/InformationRetrievalUsingOpenAIDocumentEmbedding.mlx) 5 | 6 | This example shows how to find documents to answer queries using the 'text\-embedding\-3\-small' document embedding model. Embeddings are used to represent documents and queries in a high\-dimensional space, allowing for the efficient retrieval of relevant information based on semantic similarity. 7 | 8 | 9 | The example consists of four steps: 10 | 11 | - Download and preprocess text from several MATLAB® documentation pages. 12 | - Embed query document and document corpus using the "text\-embedding\-3\-small" document embedding. 13 | - Find the documentation page most relevant to the query using cosine similarity scores. 14 | - Generate an answer to the query based on the most relevant documentation page. 
15 | 16 | This process is sometimes referred to as Retrieval\-Augmented Generation (RAG), similar to the application found in the example [ExampleRetrievalAugmentedGeneration.mlx](./ExampleRetrievalAugmentedGeneration.mlx). 17 | 18 | 19 | This example requires Text Analytics Toolbox™. 20 | 21 | 22 | To run this example, you need a valid API key from a paid OpenAI API account. 23 | 24 | ```matlab 25 | loadenv(".env") 26 | addpath('../..') 27 | ``` 28 | # Embed Query Document 29 | 30 | Convert the query into a numerical vector using the extractOpenAIEmbeddings function. Specify the model as "text\-embedding\-3\-small". 31 | 32 | ```matlab 33 | query = "What is the best way to store data made up of rows and columns?"; 34 | [qEmb, ~] = extractOpenAIEmbeddings(query, ModelName="text-embedding-3-small"); 35 | qEmb(1:5) 36 | ``` 37 | 38 | ```matlabTextOutput 39 | ans = 1x5 40 | -0.0051 -0.0005 0.0362 -0.0069 0.0534 41 | 42 | ``` 43 | # Download and Embed Source Text 44 | 45 | In this example, we will scrape content from several MATLAB documentation pages. 46 | 47 | 48 | This requires the following steps: 49 | 50 | 1. Start with a list of websites. This example uses pages from MATLAB documentation. 51 | 2. Extract the content of the pages using `extractHTMLText`. 52 | 3. Embed the websites using `extractOpenAIEmbeddings`. 
53 | ```matlab 54 | metadata = ["https://www.mathworks.com/help/matlab/numeric-types.html"; 55 | "https://www.mathworks.com/help/matlab/characters-and-strings.html"; 56 | "https://www.mathworks.com/help/matlab/date-and-time-operations.html"; 57 | "https://www.mathworks.com/help/matlab/categorical-arrays.html"; 58 | "https://www.mathworks.com/help/matlab/tables.html"]; 59 | id = (1:numel(metadata))'; 60 | document = strings(numel(metadata),1); 61 | embedding = []; 62 | for ii = id' 63 | page = webread(metadata(ii)); 64 | tree = htmlTree(page); 65 | subtree = findElement(tree,"body"); 66 | document(ii) = extractHTMLText(subtree, ExtractionMethod="article"); 67 | try 68 | [emb, ~] = extractOpenAIEmbeddings(document(ii),ModelName="text-embedding-3-small"); 69 | embedding = [embedding; emb]; 70 | catch 71 | end 72 | end 73 | vectorTable = table(id,document,metadata,embedding); 74 | ``` 75 | # Generate Answer to Query 76 | 77 | Define the system prompt in `openAIChat` to answer questions based on context. 78 | 79 | ```matlab 80 | chat = openAIChat("You are a helpful MATLAB assistant. You will get a context for each question"); 81 | ``` 82 | 83 | Calculate the cosine similarity scores between the query and each of the documentation page using the `cosineSimilarity` function. 84 | 85 | ```matlab 86 | s = cosineSimilarity(vectorTable.embedding,qEmb); 87 | ``` 88 | 89 | Use the most similar documentation content to feed extra context into the prompt for generation. 90 | 91 | ```matlab 92 | [~,idx] = max(s); 93 | context = vectorTable.document(idx); 94 | prompt = "Context: " ... 95 | + context + newline + "Answer the following question: " + query; 96 | wrapText(prompt) 97 | ``` 98 | 99 | ```matlabTextOutput 100 | ans = 101 | "Context: table is a data type suitable for column-oriented or tabular data that is often stored as columns in a text file or in a spreadsheet. 102 | Tables consist of rows and column-oriented variables. 
103 | Each variable in a table can have a different data type and a different size with the one restriction that each variable must have the same number of rows. 104 | For more information, see Create Tables and Assign Data to Them or watch Tables and Categorical Arrays. 105 | Answer the following question: What is the best way to store data made up of rows and columns?" 106 | 107 | ``` 108 | 109 | Pass the question and the context for generation to get a contextualized answer. 110 | 111 | ```matlab 112 | response = generate(chat, prompt); 113 | wrapText(response) 114 | ``` 115 | 116 | ```matlabTextOutput 117 | ans = 118 | "The best way to store data made up of rows and columns in MATLAB is by using the table data type. 119 | Tables are designed for storing tabular data, where each column can have a different data type and size, but all columns must have the same number of rows. 120 | This makes tables an ideal data structure for organizing and manipulating data in a tabular format." 121 | 122 | ``` 123 | # Helper Function 124 | 125 | Helper function to wrap text for easier reading in the live script. 
126 | 127 | ```matlab 128 | function wrappedText = wrapText(text) 129 | wrappedText = splitSentences(text); 130 | wrappedText = join(wrappedText,newline); 131 | end 132 | ``` 133 | 134 | *Copyright 2024 The MathWorks, Inc.* 135 | 136 | -------------------------------------------------------------------------------- /examples/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.md: -------------------------------------------------------------------------------- 1 | 2 | # Process Generated Text in Real Time by Using ChatGPT™ in Streaming Mode 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mlx](mlx-scripts/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mlx) 5 | 6 | This example shows how to process generated text in real time by using ChatGPT in streaming mode. 7 | 8 | 9 | By default, when you pass a prompt to ChatGPT, it generates a response internally and then outputs it in full at the end. To print out and format generated text as the model is generating it, use the `StreamFun` name\-value argument of the `openAIChat` class. The streaming function is a custom function handle that tells the model what to do with the output. 10 | 11 | 12 | The example includes two parts: 13 | 14 | - First, define and use a custom streaming function to print out generated text directly as the model generates it. 15 | - Then, create an HTML UI Component and define and use a custom streaming function to update the UI Component in real time as the model generates text. 16 | 17 | To run this example, you need a valid API key from a paid OpenAI™ API account. 18 | 19 | ```matlab 20 | loadenv(".env") 21 | addpath('../..') 22 | ``` 23 | # Print Stream Directly to Screen 24 | 25 | In this example, the streamed output is printed directly to the screen. 26 | 27 | 28 | Define the function to print the returned tokens. 
29 | 30 | ```matlab 31 | function printToken(token) 32 | fprintf("%s",token); 33 | end 34 | ``` 35 | 36 | Create the chat object with the defined function as a handle. 37 | 38 | ```matlab 39 | chat = openAIChat(StreamFun=@printToken); 40 | ``` 41 | 42 | Generate response to a prompt in streaming mode. 43 | 44 | ```matlab 45 | prompt = "What is Model-Based Design?"; 46 | generate(chat, prompt, MaxNumTokens=500); 47 | ``` 48 | 49 | ```matlabTextOutput 50 | Model-Based Design is an approach to system development that uses graphical models to design and simulate systems before implementing them in hardware or software. It involves creating models that represent the behavior and interactions of system components, and using these models to analyze, validate, and optimize the system before building it. Model-Based Design can help to improve the efficiency, reliability, and quality of system development by enabling engineers to explore design alternatives, detect errors early in the development process, and facilitate collaboration between different teams working on the same project. 51 | ``` 52 | # Print Stream to HTML UI Component 53 | 54 | In this example, the streamed output is printed to the HTML component. 55 | 56 | 57 | Create the HTML UI component. 58 | 59 | ```matlab 60 | fig = uifigure; 61 | h = uihtml(fig,Position=[50,10,450,400]); 62 | ``` 63 | 64 | Initialize the content of the HTML UI component. 65 | 66 | ```matlab 67 | resetTable(h); 68 | ``` 69 | 70 | Create the chat object with the function handle, which requires the `uihtml` object created earlier. 71 | 72 | ```matlab 73 | chat = openAIChat(StreamFun=@(x)printStream(h,x)); 74 | ``` 75 | 76 | Add the user prompt to the table in the HTML UI component. 77 | 78 | ```matlab 79 | userPrompt = "Tell me 5 jokes."; 80 | addChat(h,"user",userPrompt,"new") 81 | ``` 82 | 83 | Generate response to a prompt in streaming mode. 
84 | 85 | ```matlab 86 | [txt, message, response] = generate(chat,userPrompt); 87 | ``` 88 | 89 | Update the last row with the final output. This is necessary if further update is needed to support additional HTML formatting. 90 | 91 | ```matlab 92 | addChat(h,"assistant",txt,"current") 93 | ``` 94 | # Helper functions 95 | 96 | `resetTable`: 97 | 98 | 1. Adds the basic HTML structure and the JavaScript that process the data change in MATLAB. 99 | 2. The JavaScript gets a reference to the table and changed data and if the 3rd element in the data is "new", adds a new row. 100 | 3. It populates the new row with two cells and updates the cells from the first two elements of the data. 101 | 4. The new row is then appended to the table. 102 | 5. Otherwise, the JavaScript gets reference to the last cell of the last row of the table, and update it with the 2nd element of the data. 103 | ```matlab 104 | function resetTable(obj) 105 | %RESETTABLE initialize the HTML UI component in the input argument. 106 | mustBeA(obj,'matlab.ui.control.HTML') 107 | obj.HTMLSource = ['' ... 108 | '
RoleContent
']; 127 | obj.Data = []; 128 | drawnow 129 | end 130 | ``` 131 | 132 | `addRow` adds a new row to the table in the HTML UI component 133 | 134 | ```matlab 135 | function addChat(obj,role,content,row) 136 | %ADDCHAT adds a new row or updates the last row of the table 137 | mustBeA(obj,'matlab.ui.control.HTML') 138 | content = replace(content,newline,"
"); 139 | obj.Data = {role,content,row}; 140 | drawnow 141 | end 142 | ``` 143 | 144 | `printStream` is the streaming function and prints the stream in the table in the HTML UI component 145 | 146 | ```matlab 147 | function printStream(h,x) 148 | %PRINTSTREAM prints the stream in a new row in the table 149 | if strlength(x) == 0 150 | % if the first token is 0 length, add a new row 151 | tokens = string(x); 152 | h.Data = {"assistant",tokens,"new"}; 153 | else 154 | % otherwise append the new token to the previous tokens 155 | % if the new token contains a line break, replace 156 | % it with
157 | if contains(x,newline) 158 | x = replace(x,newline,"
"); 159 | end 160 | tokens = h.Data{2} + string(x); 161 | % update the existing row. 162 | h.Data = {"assistant",tokens,"current"}; 163 | end 164 | drawnow 165 | end 166 | ``` 167 | 168 | *Copyright 2024 The MathWorks, Inc.* 169 | 170 | -------------------------------------------------------------------------------- /examples/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.md: -------------------------------------------------------------------------------- 1 | 2 | # Retrieval\-Augmented Generation Using ChatGPT™ and MATLAB 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mlx](mlx-scripts/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mlx) 5 | 6 | This example shows how to use retrieval\-augmented generation to generate answers to queries based on information contained in a document corpus. 7 | 8 | 9 | The example contains three steps: 10 | 11 | - Download and preprocess documents. 12 | - Find documents relevant to a query using keyword search. 13 | - Generate a response using ChatGPT based on both the query and the most relevant source document. 14 | 15 | This example requires Text Analytics Toolbox™. 16 | 17 | 18 | To run this example, you need a valid API key from a paid OpenAI™ API account. 19 | 20 | ```matlab 21 | loadenv(".env") 22 | addpath('../..') 23 | ``` 24 | # Download and Preprocess Documents 25 | 26 | Specify the URLs of the reports. 27 | 28 | ```matlab 29 | url = ["https://openknowledge.worldbank.org/bitstreams/0c18c872-91f0-51a4-ba91-c36b98893b4a/download" 30 | "https://openknowledge.worldbank.org/bitstreams/476f037b-a17e-484f-9cc2-282a2e5a929f/download" 31 | "https://openknowledge.worldbank.org/bitstreams/0c18c872-91f0-51a4-ba91-c36b98893b4a/download"]; 32 | ``` 33 | 34 | Define the local path where the reports will be saved and download the reports using the provided URLs and save them to the specified local path. 
35 | 36 | ```matlab 37 | localpath = "./data/"; 38 | if ~exist(localpath, "dir") 39 | mkdir(localpath); 40 | end 41 | numFiles = numel(url); 42 | for i = 1:numFiles 43 | filename = "WBD_" + i + ".pdf"; 44 | local_file_name = fullfile(localpath, filename); 45 | if ~exist(local_file_name,"file") 46 | websave(local_file_name, url{i}, weboptions(Timeout=30)); 47 | end 48 | end 49 | ``` 50 | 51 | Define the function to read the text from the downloaded files. 52 | 53 | ```matlab 54 | readFcn = @extractFileText; 55 | file_pattern = [".txt",".pdf",".docx",".html",".htm"]; 56 | fds = fileDatastore(localpath,'FileExtensions',file_pattern,'ReadFcn',readFcn); 57 | 58 | str = readall(fds); 59 | str = [str{:}]; 60 | ``` 61 | 62 | Split the text data into paragraphs with the helper function `preprocessDocuments`. 63 | 64 | ```matlab 65 | documents = preprocessDocuments(str); 66 | ``` 67 | 68 | Initialize the chatbot with a system prompt and API key. Include your API key in the environment variable `OPENAI_API_KEY` or pass your key using the `APIKey` name\-value pair. 69 | 70 | ```matlab 71 | chat = openAIChat("You are a helpful assistant. You will get a " + ... 72 | "context for each question, but only use the information " + ... 73 | "in the context if that makes sense to answer the question. " + ... 74 | "Let's think step-by-step, explaining how you reached the answer."); 75 | ``` 76 | # Retrieve Relevant Documents 77 | 78 | Define the query, then retrieve and filter the relevant documents based on the query. 79 | 80 | ```matlab 81 | query = "What technical criteria can be used to streamline new approvals for grid-friendly DPV?"; 82 | ``` 83 | 84 | Tokenize the query and find similarity scores between the query and documents. 85 | 86 | ```matlab 87 | embQuery = bm25Similarity(documents, tokenizedDocument(query)); 88 | ``` 89 | 90 | Sort the documents in descending order of similarity scores. 
91 | 92 | ```matlab 93 | [~, idx] = sort(embQuery, "descend"); 94 | limitWords = 1000; 95 | selectedDocs = []; 96 | totalWords = 0; 97 | ``` 98 | 99 | Iterate over sorted document indices until word limit is reached 100 | 101 | ```matlab 102 | i = 1; 103 | while totalWords <= limitWords && i <= length(idx) 104 | totalWords = totalWords + doclength(documents(idx(i))); 105 | selectedDocs = [selectedDocs; joinWords(documents(idx(i)))]; 106 | i = i + 1; 107 | end 108 | ``` 109 | # Generate Response 110 | 111 | Define the prompt for the chatbot and generate a response. 112 | 113 | ```matlab 114 | prompt = "Context:" + join(selectedDocs, " ") + newline + ... 115 | "Answer the following question: " + query; 116 | response = generate(chat, prompt); 117 | ``` 118 | 119 | Wrap the text for easier visualization. 120 | 121 | ```matlab 122 | wrapText(response) 123 | ``` 124 | 125 | ```matlabTextOutput 126 | ans = 127 | "The context provides information on how technical criteria can be used to 128 | streamline new approvals for grid-friendly DPV. It mentions that technical 129 | approvals for DPV installations to connect to the grid can be streamlined with 130 | prudent screening criteria for systems that meet certain specifications. 131 | Additionally, it emphasizes the importance of having a grid code that reflects 132 | expected future growth of distributed energy resources. 133 | 134 | Therefore, the technical criteria that can be used to streamline new approvals 135 | for grid-friendly DPV include having prudent screening criteria based on 136 | specific specifications and ensuring that the grid code is in line with the 137 | expected growth of distributed resources. This helps in facilitating the 138 | connection of DPV installations to the grid efficiently and effectively." 
139 | 140 | ``` 141 | # Helper Functions 142 | ```matlab 143 | function allDocs = preprocessDocuments(str) 144 | tokenized = tokenizedDocument(join(str,[newline newline])); 145 | allDocs = splitParagraphs(tokenized); 146 | end 147 | 148 | function wrappedText = wrapText(text) 149 | s = textwrap(text,80); 150 | wrappedText = string(join(s,newline)); 151 | end 152 | ``` 153 | # References 154 | 155 | *Energy Sector Management Assistance Program (ESMAP). 2023. From Sun to Roof to Grid: Power Systems and Distributed PV. Technical Report. Washington, DC: World Bank. License: Creative Commons Attribution CC BY 3.0 IGO* 156 | 157 | 158 | *Copyright 2024 The MathWorks, Inc.* 159 | 160 | -------------------------------------------------------------------------------- /examples/RetrievalAugmentedGenerationUsingOllamaAndMATLAB.md: -------------------------------------------------------------------------------- 1 | 2 | # Retrieval\-Augmented Generation Using Ollama™ and MATLAB 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/RetrievalAugmentedGenerationUsingOllamaAndMATLAB.mlx](mlx-scripts/RetrievalAugmentedGenerationUsingOllamaAndMATLAB.mlx) 5 | 6 | This example shows how to use retrieval\-augmented generation to generate answers to queries based on information contained in a document corpus. 7 | 8 | 9 | The example contains three steps: 10 | 11 | - Download and preprocess documents. 12 | - Find documents relevant to a query using keyword search. 13 | - Generate a response using Ollama based on both the query and the most relevant source document. 14 | 15 | This example requires Text Analytics Toolbox™ and a running Ollama service. As written, it requires the Mistral® NeMo model to be installed in that Ollama instance. 16 | 17 | # Download and Preprocess Documents 18 | 19 | Specify the URLs of the reports. 
20 | 21 | ```matlab 22 | url = ["https://openknowledge.worldbank.org/bitstreams/0c18c872-91f0-51a4-ba91-c36b98893b4a/download" 23 | "https://openknowledge.worldbank.org/bitstreams/476f037b-a17e-484f-9cc2-282a2e5a929f/download" 24 | "https://openknowledge.worldbank.org/bitstreams/0c18c872-91f0-51a4-ba91-c36b98893b4a/download"]; 25 | ``` 26 | 27 | Define the local path where the reports will be saved and download the reports using the provided URLs and save them to the specified local path. 28 | 29 | ```matlab 30 | localpath = "./data/"; 31 | if ~exist(localpath, "dir") 32 | mkdir(localpath); 33 | end 34 | numFiles = numel(url); 35 | for i = 1:numFiles 36 | filename = "WBD_" + i + ".pdf"; 37 | localFileName = fullfile(localpath, filename); 38 | if ~exist(localFileName,"file") 39 | websave(localFileName, url{i}, weboptions(Timeout=30)); 40 | end 41 | end 42 | ``` 43 | 44 | Define the function to read the text from the downloaded files. 45 | 46 | ```matlab 47 | readFcn = @extractFileText; 48 | filePattern = [".txt",".pdf",".docx",".html",".htm"]; 49 | fds = fileDatastore(localpath,'FileExtensions',filePattern,'ReadFcn',readFcn); 50 | 51 | str = readall(fds); 52 | str = [str{:}]; 53 | ``` 54 | 55 | Split the text data into paragraphs with the helper function `preprocessDocuments`. 56 | 57 | ```matlab 58 | documents = preprocessDocuments(str); 59 | ``` 60 | 61 | Initialize the chatbot with the model name (Mistral NeMo) and a generic system prompt. Due to the long input created below, responses may take a long time on older machines; increase the accepted timeout. 62 | 63 | ```matlab 64 | chat = ollamaChat("mistral-nemo", ... 65 | "You are a helpful assistant. You will get a " + ... 66 | "context for each question, but only use the information " + ... 67 | "in the context if that makes sense to answer the question. " + ... 68 | "Let's think step-by-step, explaining how you reached the answer.", ... 
69 | TimeOut=600); 70 | ``` 71 | # Retrieve Relevant Documents 72 | 73 | Define the query, then retrieve and filter the relevant documents based on the query. 74 | 75 | ```matlab 76 | query = "What technical criteria can be used to streamline new approvals for grid-friendly DPV?"; 77 | ``` 78 | 79 | Tokenize the query and find similarity scores between the query and documents. 80 | 81 | ```matlab 82 | embQuery = bm25Similarity(documents, tokenizedDocument(query)); 83 | ``` 84 | 85 | Sort the documents in descending order of similarity scores. 86 | 87 | ```matlab 88 | [~, idx] = sort(embQuery, "descend"); 89 | limitWords = 1000; 90 | selectedDocs = []; 91 | totalWords = 0; 92 | ``` 93 | 94 | Iterate over sorted document indices until word limit is reached 95 | 96 | ```matlab 97 | i = 1; 98 | while totalWords <= limitWords && i <= length(idx) 99 | totalWords = totalWords + doclength(documents(idx(i))); 100 | selectedDocs = [selectedDocs; joinWords(documents(idx(i)))]; 101 | i = i + 1; 102 | end 103 | ``` 104 | # Generate Response 105 | 106 | Define the prompt for the chatbot and generate a response. 107 | 108 | ```matlab 109 | prompt = "Context:" + join(selectedDocs, " ") + newline + ... 110 | "Answer the following question: " + query; 111 | response = generate(chat, prompt); 112 | ``` 113 | 114 | Wrap the text for easier visualization. 115 | 116 | ```matlab 117 | wrapText(response) 118 | ``` 119 | 120 | ```matlabTextOutput 121 | ans = 122 | "Based on the provided context, several technical criteria can be used to 123 | streamline new approvals for grid-friendly DPV (Distributed Photovoltaics). 124 | These include: 125 | 126 | 1. **Inverter Programming:** Ensuring inverters have appropriate programming to 127 | provide valuable services such as reactive power control for voltage management 128 | or active power curtailment for congestion management. 129 | 2. **Capacity Building:** Timely capacity building of personnel to manage high 130 | shares of DPV is crucial. 
This includes training staff on grid integration, 131 | operation, and maintenance issues related to DPV. 132 | 3. **Grid Code Adherence:** Adhering to a grid code that reflects expected 133 | future growth of distributed energy resources. This ensures technical rules 134 | keep pace with installed DPV capacity. 135 | 4. **Prudent Screening Criteria:** Using prudent screening criteria for systems 136 | that meet certain specifications. For example, metrics like DPV capacity 137 | penetration relative to minimum feeder daytime load can be considered. 138 | 139 | By applying these criteria and using case-by-case appraisal, new approvals for 140 | grid-friendly DPV installations can potentially be streamlined while 141 | maintaining grid reliability and stability." 142 | 143 | ``` 144 | # Helper Functions 145 | ```matlab 146 | function allDocs = preprocessDocuments(str) 147 | tokenized = tokenizedDocument(join(str,[newline newline])); 148 | allDocs = splitParagraphs(tokenized); 149 | end 150 | 151 | function wrappedText = wrapText(text) 152 | s = textwrap(text,80); 153 | wrappedText = string(join(s,newline)); 154 | end 155 | ``` 156 | # References 157 | 158 | *Energy Sector Management Assistance Program (ESMAP). 2023. From Sun to Roof to Grid: Power Systems and Distributed PV. Technical Report. Washington, DC: World Bank. 
License: Creative Commons Attribution CC BY 3.0 IGO* 159 | 160 | 161 | *Copyright 2024 The MathWorks, Inc.* 162 | 163 | -------------------------------------------------------------------------------- /examples/SummarizeLargeDocumentsUsingChatGPTandMATLAB.md: -------------------------------------------------------------------------------- 1 | 2 | # Summarize Large Documents Using ChatGPT™ and MATLAB® 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/SummarizeLargeDocumentsUsingChatGPTandMATLAB.mlx](mlx-scripts/SummarizeLargeDocumentsUsingChatGPTandMATLAB.mlx) 5 | 6 | This example shows how to use ChatGPT to summarize documents that are too large to be summarized at once. 7 | 8 | 9 | To summarize short documents using ChatGPT, you can pass the documents directly as a prompt together with an instruction to summarize them. However, ChatGPT can only process prompts of limited size. 10 | 11 | 12 | To summarize documents that are larger than this limit, split the documents up into smaller documents. Summarize the smaller document chunks, then pass all of the summaries to ChatGPT to generate one overall summary. 13 | 14 | - This example includes four steps: 15 | - Download the complete text of "Alice in Wonderland" by Lewis Carroll from Project Gutenberg. 16 | - Split the documents up into chunks of less than 3000 words. 17 | - Use ChatGPT to create summaries of each chunk. 18 | - Then use ChatGPT to create a summary of all of the summaries. 19 | 20 | To run this example, you need Text Analytics Toolbox™. 21 | 22 | 23 | To run this example, you need a valid API key from a paid OpenAI™ API account. 24 | 25 | ```matlab 26 | loadenv(".env") 27 | addpath('../..') 28 | ``` 29 | # Download Text Data 30 | 31 | Download and read the content from Alice's Adventures in Wonderland by Lewis Carroll from Project Gutenberg. 32 | 33 | 34 | First read the contents of the webpage. 
35 | 36 | ```matlab 37 | options = weboptions(Timeout=30); 38 | code = webread("https://www.gutenberg.org/files/11/11-h/11-h.htm", options); 39 | longText = extractHTMLText(string(code)); 40 | ``` 41 | # Split Document Into Chunks 42 | 43 | Large language models have a limit in terms of how much text they can accept as input, so if you try to summarize the complete book, you will likely get an error. A workaround is splitting the book into chunks and summarize each chunk individually. The chunk size is defined in `limitChunkWords`, which restricts the numbers of words in a chunk. 44 | 45 | ```matlab 46 | incrementalSummary = longText; 47 | limitChunkWords = 3000; 48 | chunks = createChunks(incrementalSummary, limitChunkWords); 49 | ``` 50 | # Summarize Chunks 51 | 52 | Initialize a ChatGPT session with the role of summarizing text 53 | 54 | ```matlab 55 | summarizer = openAIChat("You are a professional summarizer."); 56 | ``` 57 | 58 | Looping process to gradually summarize the text chunk by chunk, reducing the chunk size with each iteration. 59 | 60 | ```matlab 61 | numCalls = 0; 62 | while numel(chunks)>1 63 | summarizedChunks = strings(size(chunks)); 64 | numCalls = numCalls + numel(chunks); 65 | ``` 66 | 67 | Add a limit to the number of calls, to ensure you are not making more calls than what is expected. You can change this value to match what is needed for your application. 68 | 69 | ```matlab 70 | if numCalls > 20 71 | error("Document is too long to be summarized.") 72 | end 73 | 74 | for i = 1:length(chunks) 75 | summarizedChunks(i) = generate(summarizer, "Summarize this content:" + newline + chunks(i)); 76 | end 77 | ``` 78 | 79 | Merge the summarized chunks to serve as the base for the next iteration. 80 | 81 | ```matlab 82 | incrementalSummary = join(summarizedChunks); 83 | ``` 84 | 85 | Form new chunks with a reduced size for the subsequent iteration. 
86 | 87 | ```matlab 88 | chunks = createChunks(incrementalSummary, limitChunkWords); 89 | end 90 | ``` 91 | # Summarize Document 92 | 93 | Compile the final summary by combining the summaries from all the chunks. 94 | 95 | ```matlab 96 | fullSummary = generate(summarizer, "The following text is a combination of summaries. " + ... 97 | "Provide a cohese and coherent summary combining these smaller summaries, preserving as much information as possible:" + newline + incrementalSummary); 98 | wrapText(fullSummary) 99 | ``` 100 | 101 | ```matlabTextOutput 102 | ans = 103 | ""Alice's Adventures in Wonderland" by Lewis Carroll follows the whimsical journey of a young girl, Alice, who falls into a fantastical world through a rabbit hole. 104 | Throughout her adventures, Alice encounters a series of peculiar characters and bizarre events while trying to find her way back home. 105 | She navigates through surreal situations such as a Caucus-race with talking animals, converses with a cryptic Caterpillar about identity and size changes, and experiences a mad tea party with the March Hare and the Hatter. 106 | Alice also interacts with the Queen of Hearts during a chaotic croquet game, intervenes in a trial involving the theft of tarts, and meets the Mock Turtle and Gryphon who share odd stories and engage in whimsical discussions about lobsters and fish tails. 107 | The narrative is filled with illogical and imaginative elements, capturing readers' imaginations with its colorful and eccentric storytelling." 108 | 109 | ``` 110 | # `createChunks` function 111 | 112 | This function segments a long text into smaller parts of a predefined size to facilitate easier summarization. It preserves the structure of sentences. The `chunkSize` should be large enough to fit at least one sentence. 
113 | 114 | ```matlab 115 | function chunks = createChunks(text, chunkSize) 116 | % Tokenizing the input text for processing 117 | text = tokenizedDocument(text); 118 | 119 | % Splitting the tokenized text into individual sentences 120 | text = splitSentences(text); 121 | chunks = []; 122 | currentChunk = ""; 123 | currentChunkSize = 0; 124 | 125 | % Iterating through the sentences to aggregate them into chunks until the chunk 126 | % attains the predefined size, after which a new chunk is started 127 | for i=1:length(text) 128 | newChunkSize = currentChunkSize + doclength(text(i)); 129 | if newChunkSize < chunkSize 130 | currentChunkSize = currentChunkSize + doclength(text(i)); 131 | currentChunk = currentChunk + " " + joinWords(text(i)); 132 | else 133 | chunks = [chunks; currentChunk]; %#ok 134 | currentChunkSize = doclength(text(i)); 135 | currentChunk = joinWords(text(i)); 136 | end 137 | end 138 | end 139 | ``` 140 | # `wrapText` function 141 | 142 | This function splits text into sentences and then concatenates them again using `newline` to make it easier to visualize text in this example 143 | 144 | ```matlab 145 | function wrappedText = wrapText(text) 146 | wrappedText = splitSentences(text); 147 | wrappedText = join(wrappedText,newline); 148 | end 149 | ``` 150 | 151 | *Copyright 2023 The MathWorks, Inc.* 152 | 153 | -------------------------------------------------------------------------------- /examples/UsingDALLEToEditImages.md: -------------------------------------------------------------------------------- 1 | 2 | # Using DALL·E™ to Edit Images 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/UsingDALLEToEditImages.mlx](mlx-scripts/UsingDALLEToEditImages.mlx) 5 | 6 | This example shows how to generate and edit images using the `openAIImages` object. 7 | 8 | 9 | To run this example, you need a valid OpenAI™ API key. Creating images using DALL•E may incur a fee. 
10 | 11 | ```matlab 12 | loadenv(".env") 13 | addpath("../..") 14 | ``` 15 | 16 | We want to load images files relative to the project directory below: 17 | 18 | ```matlab 19 | projectDir = fileparts(which("openAIImages")); 20 | ``` 21 | # Generate image variations 22 | 23 | Use the image variation feature in DALL•E 2. 24 | 25 | ```matlab 26 | mdl = openAIImages(ModelName="dall-e-2"); 27 | ``` 28 | 29 | Show the image to get variations for. 30 | 31 | ```matlab 32 | imagePath = fullfile(projectDir,"examples","images","bear.png"); 33 | figure 34 | imshow(imagePath) 35 | ``` 36 | 37 | ![figure_0.png](UsingDALLEToEditImages_media/figure_0.png) 38 | 39 | Generate variations for that image. 40 | 41 | ```matlab 42 | [images,resp] = createVariation(mdl, imagePath, NumImages=4); 43 | if ~isempty(images) 44 | tiledlayout('flow') 45 | for ii = 1:numel(images) 46 | nexttile 47 | imshow(images{ii}) 48 | end 49 | else 50 | disp(resp.Body.Data.error) 51 | end 52 | ``` 53 | 54 | ![figure_1.png](UsingDALLEToEditImages_media/figure_1.png) 55 | # Edit an Image with DALL·E 56 | 57 | Use an image containing a mask to apply modifications to the masked area. 58 | 59 | ```matlab 60 | maskImagePath = fullfile(projectDir,"examples","images","mask_bear.png"); 61 | figure 62 | imshow(maskImagePath) 63 | ``` 64 | 65 | ![figure_2.png](UsingDALLEToEditImages_media/figure_2.png) 66 | 67 | Add a swan to the masked area using the function edit. 
68 | 69 | ```matlab 70 | [images,resp] = edit(mdl, imagePath, "Swan", MaskImagePath=maskImagePath); 71 | if isfield(resp.Body.Data,'data') 72 | figure 73 | imshow(images{1}); 74 | else 75 | disp(resp.Body.Data.error) 76 | end 77 | ``` 78 | 79 | ![figure_3.png](UsingDALLEToEditImages_media/figure_3.png) 80 | 81 | *Copyright 2024 The MathWorks, Inc.* 82 | 83 | -------------------------------------------------------------------------------- /examples/UsingDALLEToEditImages_media/figure_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/UsingDALLEToEditImages_media/figure_0.png -------------------------------------------------------------------------------- /examples/UsingDALLEToEditImages_media/figure_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/UsingDALLEToEditImages_media/figure_1.png -------------------------------------------------------------------------------- /examples/UsingDALLEToEditImages_media/figure_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/UsingDALLEToEditImages_media/figure_2.png -------------------------------------------------------------------------------- /examples/UsingDALLEToEditImages_media/figure_3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/UsingDALLEToEditImages_media/figure_3.png -------------------------------------------------------------------------------- /examples/UsingDALLEToGenerateImages.md: 
-------------------------------------------------------------------------------- 1 | 2 | # Using DALL·E™ to generate images 3 | 4 | To run the code shown on this page, open the MLX file in MATLAB®: [mlx-scripts/UsingDALLEToGenerateImages.mlx](mlx-scripts/UsingDALLEToGenerateImages.mlx) 5 | 6 | This example shows how to generate images using the `openAIImages` object. 7 | 8 | 9 | To run this example, you need a valid OpenAI™ API key. Creating images using DALL\-E may incur a fee. 10 | 11 | ```matlab 12 | loadenv(".env") 13 | addpath('../..') 14 | ``` 15 | # Image Generation with DALL·E 3 16 | 17 | Create an `openAIImages` object with `ModelName` `dall-e-3`. 18 | 19 | ```matlab 20 | mdl = openAIImages(ModelName="dall-e-3"); 21 | ``` 22 | 23 | Generate and visualize an image. This model only supports the generation of one image per request. 24 | 25 | ```matlab 26 | images = generate(mdl,"A crispy fresh API key"); 27 | figure 28 | imshow(images{1}) 29 | ``` 30 | 31 | ![figure_0.png](UsingDALLEToGenerateImages_media/figure_0.png) 32 | 33 | You can also define the style and quality of the image 34 | 35 | ```matlab 36 | images = generate(mdl,"A cat playing with yarn", Quality="hd", Style="natural"); 37 | figure 38 | imshow(images{1}) 39 | ``` 40 | 41 | ![figure_1.png](UsingDALLEToGenerateImages_media/figure_1.png) 42 | 43 | *Copyright 2024 The MathWorks, Inc.* 44 | 45 | -------------------------------------------------------------------------------- /examples/UsingDALLEToGenerateImages_media/figure_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/UsingDALLEToGenerateImages_media/figure_0.png -------------------------------------------------------------------------------- /examples/UsingDALLEToGenerateImages_media/figure_1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/UsingDALLEToGenerateImages_media/figure_1.png -------------------------------------------------------------------------------- /examples/images/bear.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/images/bear.png -------------------------------------------------------------------------------- /examples/images/mask_bear.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/images/mask_bear.png -------------------------------------------------------------------------------- /examples/mlx-scripts/AnalyzeScientificPapersUsingFunctionCalls.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/AnalyzeScientificPapersUsingFunctionCalls.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.mlx: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/AnalyzeTextDataUsingParallelFunctionCallwithOllama.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/AnalyzeTextDataUsingParallelFunctionCallwithOllama.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/CreateSimpleChatBot.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/CreateSimpleChatBot.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/CreateSimpleOllamaChatBot.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/CreateSimpleOllamaChatBot.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/DescribeImagesUsingChatGPT.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/DescribeImagesUsingChatGPT.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/InformationRetrievalUsingOpenAIDocumentEmbedding.mlx: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/InformationRetrievalUsingOpenAIDocumentEmbedding.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/ProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/ProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/RetrievalAugmentedGenerationUsingOllamaAndMATLAB.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/RetrievalAugmentedGenerationUsingOllamaAndMATLAB.mlx 
-------------------------------------------------------------------------------- /examples/mlx-scripts/SummarizeLargeDocumentsUsingChatGPTandMATLAB.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/SummarizeLargeDocumentsUsingChatGPTandMATLAB.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/UsingDALLEToEditImages.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/UsingDALLEToEditImages.mlx -------------------------------------------------------------------------------- /examples/mlx-scripts/UsingDALLEToGenerateImages.mlx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/examples/mlx-scripts/UsingDALLEToGenerateImages.mlx -------------------------------------------------------------------------------- /extractOpenAIEmbeddings.m: -------------------------------------------------------------------------------- 1 | function [emb, response] = extractOpenAIEmbeddings(text, nvp) 2 | % EXTRACTOPENAIEMBEDDINGS Generate text embeddings using the OpenAI API 3 | % 4 | % emb = EXTRACTOPENAIEMBEDDINGS(text) generates an embedding of the input 5 | % TEXT using the OpenAI API. 6 | % 7 | % emb = EXTRACTOPENAIEMBEDDINGS(text,Name=Value) specifies optional 8 | % specifies additional options using one or more name-value pairs: 9 | % 10 | % 'ModelName' - The ID of the model to use. 11 | % 12 | % 'APIKey' - OpenAI API token. 
It can also be specified by 13 | % setting the environment variable OPENAI_API_KEY 14 | % 15 | % 'TimeOut' - Connection Timeout in seconds (default: 10 secs) 16 | % 17 | % 'Dimensions' - Number of dimensions the resulting output 18 | % embeddings should have. 19 | % 20 | % [emb, response] = EXTRACTOPENAIEMBEDDINGS(...) also returns the full 21 | % response from the OpenAI API call. 22 | % 23 | % Copyright 2023-2024 The MathWorks, Inc. 24 | 25 | arguments 26 | text (1,:) {mustBeNonzeroLengthText} 27 | nvp.ModelName (1,1) string {mustBeMember(nvp.ModelName,["text-embedding-ada-002", ... 28 | "text-embedding-3-large", "text-embedding-3-small"])} = "text-embedding-ada-002" 29 | nvp.TimeOut (1,1) {mustBeNumeric,mustBeReal,mustBePositive} = 10 30 | nvp.Dimensions (1,1) {mustBeNumeric,mustBeInteger,mustBePositive} 31 | nvp.APIKey {llms.utils.mustBeNonzeroLengthTextScalar} 32 | end 33 | 34 | END_POINT = "https://api.openai.com/v1/embeddings"; 35 | 36 | key = llms.internal.getApiKeyFromNvpOrEnv(nvp,"OPENAI_API_KEY"); 37 | text = convertCharsToStrings(text); 38 | 39 | parameters = struct("input",text,"model",nvp.ModelName); 40 | 41 | if isfield(nvp, "Dimensions") 42 | if nvp.ModelName=="text-embedding-ada-002" 43 | error("llms:invalidOptionForModel", ... 44 | llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "Dimensions", nvp.ModelName)); 45 | end 46 | mustBeCorrectDimensions(nvp.Dimensions,nvp.ModelName); 47 | parameters.dimensions = nvp.Dimensions; 48 | end 49 | 50 | 51 | response = llms.internal.sendRequestWrapper(parameters,key, END_POINT, nvp.TimeOut); 52 | 53 | if isfield(response.Body.Data, "data") 54 | emb = [response.Body.Data.data.embedding]; 55 | emb = emb'; 56 | else 57 | emb = []; 58 | end 59 | end 60 | 61 | function mustBeCorrectDimensions(dimensions,modelName) 62 | model2dim = .... 63 | dictionary(["text-embedding-3-large", "text-embedding-3-small"], ... 
64 | [3072,1536]); 65 | 66 | mustBeNumeric(dimensions); 67 | if dimensions>model2dim(modelName) 68 | error("llms:dimensionsMustBeSmallerThan", ... 69 | llms.utils.errorMessageCatalog.getMessage("llms:dimensionsMustBeSmallerThan", ... 70 | string(model2dim(modelName)))); 71 | end 72 | end -------------------------------------------------------------------------------- /license.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2023, The MathWorks, Inc. 2 | All rights reserved. 3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 4 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 5 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 6 | 3. In all cases, the software is, and all modifications and derivatives of the software shall be, licensed to you solely for use in conjunction with MathWorks products and service offerings. 7 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-------------------------------------------------------------------------------- /openAIMessages.m: -------------------------------------------------------------------------------- 1 | function msgs = openAIMessages 2 | %openAIMessages - backward compatibility function 3 | % 4 | % This function only exists for backward compatibility and will be removed 5 | % at some time in the future. Please use messageHistory instead. 6 | 7 | % Copyright 2024 The MathWorks, Inc. 8 | 9 | msgs = messageHistory; 10 | end 11 | -------------------------------------------------------------------------------- /tests/hstructuredOutput.m: -------------------------------------------------------------------------------- 1 | classdef (Abstract) hstructuredOutput < matlab.unittest.TestCase 2 | % Tests for completion APIs providing structured output 3 | 4 | % Copyright 2023-2025 The MathWorks, Inc. 5 | 6 | properties(Abstract) 7 | structuredModel 8 | end 9 | 10 | methods(Test) 11 | % Test methods 12 | function generateWithStructuredOutput(testCase) 13 | import matlab.unittest.constraints.ContainsSubstring 14 | import matlab.unittest.constraints.StartsWithSubstring 15 | res = generate(testCase.structuredModel,"Which animal produces honey?",... 16 | ResponseFormat = struct(commonName = "dog", scientificName = "Canis familiaris")); 17 | testCase.assertClass(res,"struct"); 18 | testCase.verifySize(fieldnames(res),[2,1]); 19 | testCase.verifyThat(lower(res.commonName), ContainsSubstring("bee")); 20 | testCase.verifyThat(res.scientificName, StartsWithSubstring("Apis")); 21 | end 22 | 23 | function generateListWithStructuredOutput(testCase) 24 | prototype = struct("plantName",{"appletree","pear"}, ... 25 | "fruit",{"apple","pear"}, ... 26 | "edible",[true,true], ... 
27 | "ignore", missing); 28 | res = generate(testCase.structuredModel,"What is harvested in August?", ResponseFormat = prototype); 29 | testCase.verifyCompatibleStructs(res, prototype); 30 | end 31 | 32 | function generateWithNestedStructs(testCase) 33 | stepsPrototype = struct("explanation",{"a","b"},"assumptions",{"a","b"}); 34 | prototype = struct("steps",stepsPrototype,"final_answer","a"); 35 | res = generate(testCase.structuredModel,"What is the positive root of x^2-2*x+1?", ... 36 | ResponseFormat=prototype); 37 | testCase.verifyCompatibleStructs(res,prototype); 38 | end 39 | 40 | function incompleteJSONResponse(testCase) 41 | country = ["USA";"UK"]; 42 | capital = ["Washington, D.C.";"London"]; 43 | population = [345716792;69203012]; 44 | prototype = struct("country",country,"capital",capital,"population",population); 45 | 46 | testCase.verifyError(@() generate(testCase.structuredModel, ... 47 | "What are the five largest countries whose English names" + ... 48 | " start with the letter A?", ... 49 | ResponseFormat = prototype, MaxNumTokens=3), "llms:apiReturnedIncompleteJSON"); 50 | end 51 | 52 | function generateWithExplicitSchema(testCase) 53 | import matlab.unittest.constraints.IsSameSetAs 54 | schema = iGetSchema(); 55 | 56 | genUser = generate(testCase.structuredModel,"Create a sample user",ResponseFormat=schema); 57 | testCase.verifyClass(genUser,"string"); 58 | genUserDecoded = jsondecode(genUser); 59 | testCase.verifyClass(genUserDecoded.item,"struct"); 60 | testCase.verifyThat(fieldnames(genUserDecoded.item),... 
61 | IsSameSetAs({'name','age'}) | IsSameSetAs({'number','street','city'})); 62 | end 63 | end 64 | 65 | methods 66 | function verifyCompatibleStructs(testCase,data,prototype) 67 | testCase.assertClass(data,"struct"); 68 | testCase.assertClass(prototype,"struct"); 69 | arrayfun(@(d) testCase.verifyCompatibleStructsScalar(d,prototype(1)), data); 70 | end 71 | 72 | function verifyCompatibleStructsScalar(testCase,data,prototype) 73 | import matlab.unittest.constraints.IsSameSetAs 74 | testCase.assertClass(data,"struct"); 75 | testCase.assertClass(prototype,"struct"); 76 | testCase.assertThat(fieldnames(data),IsSameSetAs(fieldnames(prototype))); 77 | for name = fieldnames(data).' 78 | field = name{1}; 79 | testCase.verifyClass(data.(field),class(prototype.(field))); 80 | if isstruct(data.(field)) 81 | testCase.verifyCompatibleStructs(data.(field),prototype.(field)); 82 | end 83 | end 84 | end 85 | end 86 | end 87 | 88 | function str = iGetSchema() 89 | % an example from https://platform.openai.com/docs/guides/structured-outputs/supported-schemas 90 | str = string(join({ 91 | '{' 92 | ' "type": "object",' 93 | ' "properties": {' 94 | ' "item": {' 95 | ' "anyOf": [' 96 | ' {' 97 | ' "type": "object",' 98 | ' "description": "The user object to insert into the database",' 99 | ' "properties": {' 100 | ' "name": {' 101 | ' "type": "string",' 102 | ' "description": "The name of the user"' 103 | ' },' 104 | ' "age": {' 105 | ' "type": "number",' 106 | ' "description": "The age of the user"' 107 | ' }' 108 | ' },' 109 | ' "additionalProperties": false,' 110 | ' "required": [' 111 | ' "name",' 112 | ' "age"' 113 | ' ]' 114 | ' },' 115 | ' {' 116 | ' "type": "object",' 117 | ' "description": "The address object to insert into the database",' 118 | ' "properties": {' 119 | ' "number": {' 120 | ' "type": "string",' 121 | ' "description": "The number of the address. Eg. 
for 123 main st, this would be 123"' 122 | ' },' 123 | ' "street": {' 124 | ' "type": "string",' 125 | ' "description": "The street name. Eg. for 123 main st, this would be main st"' 126 | ' },' 127 | ' "city": {' 128 | ' "type": "string",' 129 | ' "description": "The city of the address"' 130 | ' }' 131 | ' },' 132 | ' "additionalProperties": false,' 133 | ' "required": [' 134 | ' "number",' 135 | ' "street",' 136 | ' "city"' 137 | ' ]' 138 | ' }' 139 | ' ]' 140 | ' }' 141 | ' },' 142 | ' "additionalProperties": false,' 143 | ' "required": [' 144 | ' "item"' 145 | ' ]' 146 | '}' 147 | }, newline)); 148 | end 149 | -------------------------------------------------------------------------------- /tests/htoolCalls.m: -------------------------------------------------------------------------------- 1 | classdef (Abstract) htoolCalls < matlab.unittest.TestCase 2 | % Tests for backends with tool calls 3 | 4 | % Copyright 2023-2025 The MathWorks, Inc. 5 | properties(Abstract) 6 | constructor 7 | defaultModel 8 | end 9 | 10 | methods(Test) 11 | function errorsWhenPassingToolChoiceWithEmptyTools(testCase) 12 | testCase.verifyError(@()generate(testCase.defaultModel,"input", ToolChoice="bla"), "llms:mustSetFunctionsForCall"); 13 | end 14 | 15 | function settingToolChoiceWithNone(testCase) 16 | functions = openAIFunction("funName"); 17 | chat = testCase.constructor(Tools=functions); 18 | 19 | testCase.verifyWarningFree(@()generate(chat,"This is okay","ToolChoice","none")); 20 | end 21 | 22 | function generateWithToolsAndStreamFunc(testCase) 23 | import matlab.unittest.constraints.HasField 24 | 25 | f = openAIFunction("writePaperDetails", "Function to write paper details to a table."); 26 | f = addParameter(f, "name", type="string", description="Name of the paper."); 27 | f = addParameter(f, "url", type="string", description="URL containing the paper."); 28 | f = addParameter(f, "explanation", type="string", description="Explanation on why the paper is related to the given 
topic."); 29 | 30 | paperExtractor = testCase.constructor( ... 31 | "You are an expert in extracting information from a paper.", ... 32 | Tools=f, StreamFun=@(s) s); 33 | 34 | input = join([ 35 | " http://arxiv.org/abs/2406.04344v1" 36 | " 2024-06-06T17:59:56Z" 37 | " 2024-06-06T17:59:56Z" 38 | " Verbalized Machine Learning: Revisiting Machine Learning with Language" 39 | " Models" 40 | " Motivated by the large progress made by large language models (LLMs), we" 41 | "introduce the framework of verbalized machine learning (VML). In contrast to" 42 | "conventional machine learning models that are typically optimized over a" 43 | "continuous parameter space, VML constrains the parameter space to be" 44 | "human-interpretable natural language. Such a constraint leads to a new" 45 | "perspective of function approximation, where an LLM with a text prompt can be" 46 | "viewed as a function parameterized by the text prompt. Guided by this" 47 | "perspective, we revisit classical machine learning problems, such as regression" 48 | "and classification, and find that these problems can be solved by an" 49 | "LLM-parameterized learner and optimizer. The major advantages of VML include" 50 | "(1) easy encoding of inductive bias: prior knowledge about the problem and" 51 | "hypothesis class can be encoded in natural language and fed into the" 52 | "LLM-parameterized learner; (2) automatic model class selection: the optimizer" 53 | "can automatically select a concrete model class based on data and verbalized" 54 | "prior knowledge, and it can update the model class during training; and (3)" 55 | "interpretable learner updates: the LLM-parameterized optimizer can provide" 56 | "explanations for why each learner update is performed. We conduct several" 57 | "studies to empirically evaluate the effectiveness of VML, and hope that VML can" 58 | "serve as a stepping stone to stronger interpretability and trustworthiness in" 59 | "ML." 60 | "" 61 | " " 62 | " Tim Z. 
Xiao" 63 | " " 64 | " " 65 | " Robert Bamler" 66 | " " 67 | " " 68 | " Bernhard Schölkopf" 69 | " " 70 | " " 71 | " Weiyang Liu" 72 | " " 73 | " Technical Report v1 (92 pages, 15 figures)" 74 | " " 75 | " " 76 | " " 77 | " " 78 | " " 79 | " " 80 | ], newline); 81 | 82 | topic = "Large Language Models"; 83 | 84 | prompt = "Given the following paper:" + newline + string(input)+ newline +... 85 | "Given the topic: "+ topic + newline + "Write the details to a table."; 86 | [~, response] = generate(paperExtractor, prompt); 87 | 88 | testCase.assertThat(response, HasField("tool_calls")); 89 | testCase.verifyEqual(response.tool_calls.type,'function'); 90 | testCase.verifyEqual(response.tool_calls.function.name,'writePaperDetails'); 91 | data = testCase.verifyWarningFree( ... 92 | @() jsondecode(response.tool_calls.function.arguments)); 93 | testCase.verifyThat(data,HasField("name")); 94 | testCase.verifyThat(data,HasField("url")); 95 | testCase.verifyThat(data,HasField("explanation")); 96 | end 97 | end 98 | end 99 | -------------------------------------------------------------------------------- /tests/private/recording-doubles/+llms/+internal/sendRequestWrapper.m: -------------------------------------------------------------------------------- 1 | function [response, streamedText] = sendRequestWrapper(parameters, token, varargin) 2 | % This function is undocumented and will change in a future release 3 | 4 | % A wrapper around sendRequest to have a test seam 5 | persistent seenCalls 6 | if isempty(seenCalls) 7 | seenCalls = cell(0,2); 8 | end 9 | 10 | persistent filename 11 | 12 | if nargin == 1 && isequal(parameters,"close") 13 | save(filename+".mat","seenCalls"); 14 | seenCalls = cell(0,2); 15 | return 16 | end 17 | 18 | if nargin==2 && isequal(parameters,"open") 19 | filename = token; 20 | return 21 | end 22 | 23 | streamFunCalls = {}; 24 | hasCallback = nargin >= 5 && isa(varargin{3},'function_handle'); 25 | if hasCallback 26 | streamFun = varargin{3}; 27 | end 28 | 
function wrappedStreamFun(varargin) 29 | streamFunCalls(end+1) = varargin; 30 | streamFun(varargin{:}); 31 | end 32 | if hasCallback 33 | varargin{3} = @wrappedStreamFun; 34 | end 35 | 36 | 37 | [response, streamedText] = llms.internal.sendRequest(parameters, token, varargin{:}); 38 | 39 | seenCalls(end+1,:) = {{parameters},{response,streamFunCalls,streamedText}}; 40 | end 41 | -------------------------------------------------------------------------------- /tests/private/recording-doubles/addpath.m: -------------------------------------------------------------------------------- 1 | function addpath(~) 2 | % ignore addpath calls in examples 3 | -------------------------------------------------------------------------------- /tests/private/replaying-doubles/+llms/+internal/sendRequestWrapper.m: -------------------------------------------------------------------------------- 1 | function [response, streamedText] = sendRequestWrapper(parameters, token, varargin) 2 | % This function is undocumented and will change in a future release 3 | 4 | % A wrapper around sendRequest to have a test seam 5 | persistent seenCalls 6 | if isempty(seenCalls) 7 | seenCalls = cell(0,2); 8 | end 9 | 10 | if nargin == 1 && isequal(parameters,"close") 11 | seenCalls = cell(0,2); 12 | return 13 | end 14 | 15 | if nargin==2 && isequal(parameters,"open") 16 | load(token+".mat","seenCalls"); 17 | return 18 | end 19 | 20 | result = seenCalls{1,2}; 21 | response = result{1}; 22 | streamFunCalls = result{2}; 23 | streamedText = result{3}; 24 | 25 | if nargin >= 5 && isa(varargin{3},'function_handle') 26 | streamFun = varargin{3}; 27 | cellfun(streamFun, streamFunCalls); 28 | end 29 | 30 | seenCalls(1,:) = []; 31 | -------------------------------------------------------------------------------- /tests/private/replaying-doubles/addpath.m: -------------------------------------------------------------------------------- 1 | function addpath(~) 2 | % ignore addpath calls in examples 3 | 
-------------------------------------------------------------------------------- /tests/recordings/AnalyzeScientificPapersUsingFunctionCalls.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/AnalyzeScientificPapersUsingFunctionCalls.mat -------------------------------------------------------------------------------- /tests/recordings/AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/AnalyzeSentimentinTextUsingChatGPTwithStructuredOutput.mat -------------------------------------------------------------------------------- /tests/recordings/AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/AnalyzeTextDataUsingParallelFunctionCallwithChatGPT.mat -------------------------------------------------------------------------------- /tests/recordings/AnalyzeTextDataUsingParallelFunctionCallwithOllama.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/AnalyzeTextDataUsingParallelFunctionCallwithOllama.mat -------------------------------------------------------------------------------- /tests/recordings/CreateSimpleChatBot.mat: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/CreateSimpleChatBot.mat -------------------------------------------------------------------------------- /tests/recordings/CreateSimpleOllamaChatBot.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/CreateSimpleOllamaChatBot.mat -------------------------------------------------------------------------------- /tests/recordings/DescribeImagesUsingChatGPT.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/DescribeImagesUsingChatGPT.mat -------------------------------------------------------------------------------- /tests/recordings/InformationRetrievalUsingOpenAIDocumentEmbedding.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/InformationRetrievalUsingOpenAIDocumentEmbedding.mat -------------------------------------------------------------------------------- /tests/recordings/ProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/ProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode.mat -------------------------------------------------------------------------------- /tests/recordings/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mat: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mat -------------------------------------------------------------------------------- /tests/recordings/README.md: -------------------------------------------------------------------------------- 1 | # Test Double Recordings 2 | 3 | Testing the examples typically takes a long time and tends to have false negatives relatively often, mostly due to timeout errors. 4 | 5 | The point of testing the examples is not to test that we can connect to the servers. We have other test points for that. Hence, we insert a “test double” while testing the examples that keeps recordings of previous interactions with the servers and just replays the responses. 6 | 7 | This directory contains those recordings. 8 | 9 | ## Generating Recordings 10 | 11 | To generate or re-generate recordings (e.g., after changing an example, or making relevant software changes), open [`texampleTests.m`](../texampleTests.m) and in `setUpAndTearDowns`, change `capture = false;` to `capture = true;`. Then, run the test points relevant to the example(s) in question, and change `capture` back to `false`. 
12 | 13 | -------------------------------------------------------------------------------- /tests/recordings/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mat -------------------------------------------------------------------------------- /tests/recordings/RetrievalAugmentedGenerationUsingOllamaAndMATLAB.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/RetrievalAugmentedGenerationUsingOllamaAndMATLAB.mat -------------------------------------------------------------------------------- /tests/recordings/SummarizeLargeDocumentsUsingChatGPTandMATLAB.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/SummarizeLargeDocumentsUsingChatGPTandMATLAB.mat -------------------------------------------------------------------------------- /tests/recordings/UsingDALLEToEditImages.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/UsingDALLEToEditImages.mat -------------------------------------------------------------------------------- /tests/recordings/UsingDALLEToGenerateImages.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/recordings/UsingDALLEToGenerateImages.mat 
-------------------------------------------------------------------------------- /tests/terrorMessageCatalog.m: -------------------------------------------------------------------------------- 1 | classdef terrorMessageCatalog < matlab.unittest.TestCase 2 | % Tests for errorMessageCatalog 3 | 4 | % Copyright 2024 The MathWorks, Inc. 5 | 6 | methods(Test) 7 | function ensureCorrectCoverage(testCase) 8 | testCase.verifyClass( ... 9 | llms.utils.errorMessageCatalog.createCatalog,"dictionary"); 10 | end 11 | 12 | function holeValuesAreUsed(testCase) 13 | import matlab.unittest.constraints.IsEqualTo 14 | 15 | % we do not check the whole string, because error message 16 | % text *should* be able to change without test points changing. 17 | % That is necessary to enable localization. 18 | messageID = "llms:mustBeValidIndex"; 19 | 20 | message1 = llms.utils.errorMessageCatalog.getMessage(messageID, "input1"); 21 | message2 = llms.utils.errorMessageCatalog.getMessage(messageID, "input2"); 22 | 23 | testCase.verifyThat(message1, ~IsEqualTo(message2)); 24 | testCase.verifyThat(replace(message1, "input1", "input2"), IsEqualTo(message2)); 25 | end 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /tests/test_files/solar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matlab-deep-learning/llms-with-matlab/d54de597b9235796f4e05a6bb752f2d1b4cd9c47/tests/test_files/solar.png -------------------------------------------------------------------------------- /tests/textractOpenAIEmbeddings.m: -------------------------------------------------------------------------------- 1 | classdef textractOpenAIEmbeddings < matlab.unittest.TestCase 2 | % Tests for extractOpenAIEmbeddings 3 | 4 | % Copyright 2023-2024 The MathWorks, Inc. 
5 | 6 | properties(TestParameter) 7 | InvalidInput = iGetInvalidInput(); 8 | ValidInput = iGetValidInput(); 9 | ValidDimensionsModelCombinations = iGetValidDimensionsModelCombinations(); 10 | end 11 | 12 | methods(Test) 13 | % Test methods 14 | function embedsDifferentStringTypes(testCase) 15 | testCase.verifyWarningFree(@()extractOpenAIEmbeddings("bla", APIKey="this-is-not-a-real-key")); 16 | testCase.verifyWarningFree(@()extractOpenAIEmbeddings('bla', APIKey="this-is-not-a-real-key")); 17 | testCase.verifyWarningFree(@()extractOpenAIEmbeddings({'bla'}, APIKey="this-is-not-a-real-key")); 18 | end 19 | 20 | function keyNotFound(testCase) 21 | % Ensures key is not in environment variable for tests 22 | openAIEnvVar = "OPENAI_API_KEY"; 23 | if isenv(openAIEnvVar) 24 | key = getenv(openAIEnvVar); 25 | reset = onCleanup(@() setenv(openAIEnvVar, key)); 26 | unsetenv(openAIEnvVar); 27 | end 28 | testCase.verifyError(@()extractOpenAIEmbeddings("bla"), "llms:keyMustBeSpecified"); 29 | end 30 | 31 | function validCombinationOfModelAndDimension(testCase, ValidDimensionsModelCombinations) 32 | testCase.verifyWarningFree(@()extractOpenAIEmbeddings("bla", ... 33 | Dimensions=ValidDimensionsModelCombinations.Dimensions,... 34 | ModelName=ValidDimensionsModelCombinations.ModelName, ... 35 | APIKey="not-real")); 36 | end 37 | 38 | function embedTextWithSuccessfulOpenAICall(testCase,ValidInput) 39 | result = testCase.verifyWarningFree(@()extractOpenAIEmbeddings(ValidInput.Input{:})); 40 | testCase.verifySize(result, ValidInput.ExpectedSize); 41 | end 42 | 43 | function invalidCombinationOfModelAndDimension(testCase) 44 | testCase.verifyError(@()extractOpenAIEmbeddings("bla", ... 45 | Dimensions=10,... 46 | ModelName="text-embedding-ada-002", ... 47 | APIKey="not-real"), ... 48 | "llms:invalidOptionForModel") 49 | end 50 | 51 | function useAllNVP(testCase) 52 | testCase.verifyWarningFree(@()extractOpenAIEmbeddings("bla", ModelName="text-embedding-ada-002", ... 
53 | APIKey="this-is-not-a-real-key", TimeOut=10)); 54 | end 55 | 56 | function testInvalidInputs(testCase, InvalidInput) 57 | testCase.verifyError(@()extractOpenAIEmbeddings(InvalidInput.Input{:}), InvalidInput.Error); 58 | end 59 | end 60 | end 61 | 62 | function validInput = iGetValidInput() 63 | validInput = struct( ... 64 | "ScalarString", struct( ... 65 | "Input",{{ "blah" }}, ... 66 | "ExpectedSize",[1,1536]), ... 67 | "StringVector", struct( ... 68 | "Input",{{ ["a", "b", "c"] }}, ... 69 | "ExpectedSize",[3,1536]), ... 70 | "CharVector", struct( ... 71 | "Input", {{ 'foo' }}, ... 72 | "ExpectedSize",[1,1536]), ... 73 | "Cellstr", struct( ... 74 | "Input",{{ {'cat', 'dog', 'mouse'} }}, ... 75 | "ExpectedSize",[3,1536]), ... 76 | "ModelAsString", struct( ... 77 | "Input",{{ "foo","ModelName","text-embedding-3-small" }}, ... 78 | "ExpectedSize",[1,1536]), ... 79 | "ModelAsChar", struct( ... 80 | "Input",{{ "foo","ModelName",'text-embedding-3-small' }}, ... 81 | "ExpectedSize",[1,1536]), ... 82 | "ModelAsCellstr", struct( ... 83 | "Input",{{ "foo","ModelName",{'text-embedding-3-small'} }}, ... 84 | "ExpectedSize",[1,1536])); 85 | end 86 | 87 | function invalidInput = iGetInvalidInput() 88 | invalidInput = struct( ... 89 | "InvalidEmptyText", struct( ... 90 | "Input",{{ "" }},... 91 | "Error", "MATLAB:validators:mustBeNonzeroLengthText"), ... 92 | ... 93 | "InvalidEmptyTextArray", struct( ... 94 | "Input",{{ ["", ""] }},... 95 | "Error", "MATLAB:validators:mustBeNonzeroLengthText"), ... 96 | ... 97 | "InvalidTimeOutType", struct( ... 98 | "Input",{{ "bla", "TimeOut", "2" }},... 99 | "Error", "MATLAB:validators:mustBeNumeric"), ... 100 | ... 101 | "InvalidTimeOutSize", struct( ... 102 | "Input",{{ "bla", "TimeOut", [1 1 1] }},... 103 | "Error", "MATLAB:validation:IncompatibleSize"), ... 104 | ... 105 | "WrongTypeText",struct( ... 106 | "Input",{{ 123 }},... 107 | "Error","MATLAB:validators:mustBeNonzeroLengthText"),... 108 | ... 
109 | "InvalidModelNameType",struct( ... 110 | "Input",{{"bla", "ModelName", 0 }},... 111 | "Error","MATLAB:validators:mustBeMember"),... 112 | ... 113 | "InvalidModelNameSize",struct( ... 114 | "Input",{{"bla", "ModelName", ["gpt-3.5-turbo", "gpt-3.5-turbo"] }},... 115 | "Error","MATLAB:validation:IncompatibleSize"),... 116 | ... 117 | "InvalidModelNameOption",struct( ... 118 | "Input",{{"bla", "ModelName", "gpt" }},... 119 | "Error","MATLAB:validators:mustBeMember"),... 120 | ... 121 | "InvalidDimensionType",struct( ... 122 | "Input",{{"bla", "Dimensions", "123" }},... 123 | "Error","MATLAB:validators:mustBeNumeric"),... 124 | ... 125 | "InvalidDimensionValue",struct( ... 126 | "Input",{{"bla", "Dimensions", "-11" }},... 127 | "Error","MATLAB:validators:mustBeNumeric"),... 128 | ... 129 | "LargeDimensionValueForModelLarge",struct( ... 130 | "Input",{{"bla", "ModelName", "text-embedding-3-large", ... 131 | "Dimensions", 3073, "APIKey", "fake-key" }},... 132 | "Error","llms:dimensionsMustBeSmallerThan"),... 133 | ... 134 | "LargeDimensionValueForModelSmall",struct( ... 135 | "Input",{{"bla", "ModelName", "text-embedding-3-small", ... 136 | "Dimensions", 1537, "APIKey", "fake-key" }},... 137 | "Error","llms:dimensionsMustBeSmallerThan"),... 138 | ... 139 | "InvalidDimensionSize",struct( ... 140 | "Input",{{"bla", "Dimensions", [123, 123] }},... 141 | "Error","MATLAB:validation:IncompatibleSize"),... 142 | ... 143 | "InvalidApiKeyType",struct( ... 144 | "Input",{{"bla", "APIKey" 123 }},... 145 | "Error","MATLAB:validators:mustBeNonzeroLengthText"),... 146 | ... 147 | "InvalidApiKeySize",struct( ... 148 | "Input",{{"bla", "APIKey" ["abc" "abc"] }},... 149 | "Error","MATLAB:validators:mustBeTextScalar")); 150 | end 151 | 152 | function validDimensionsModelCombinations = iGetValidDimensionsModelCombinations() 153 | validDimensionsModelCombinations = struct( ... 154 | "CaseTextEmbedding3Small", struct( ... 155 | "Dimensions",10,... 
156 | "ModelName", "text-embedding-3-small"), ... 157 | ... 158 | "CaseTextEmbedding3Large", struct( ... 159 | "Dimensions",10,... 160 | "ModelName", "text-embedding-3-large")); 161 | end 162 | -------------------------------------------------------------------------------- /tests/tjsonSchemaFromPrototype.m: -------------------------------------------------------------------------------- 1 | classdef tjsonSchemaFromPrototype < matlab.unittest.TestCase 2 | % Unit tests for llms.internal.jsonSchemaFromPrototype and llms.jsonSchemaFromPrototype 3 | 4 | % Copyright 2024 The MathWorks, Inc. 5 | 6 | methods (Test) 7 | function simpleExample(testCase) 8 | import matlab.unittest.constraints.IsSameSetAs 9 | 10 | prototype = struct("name","James","age",20); 11 | schema = llms.internal.jsonSchemaFromPrototype(prototype); 12 | 13 | testCase.assertClassSchema(schema) 14 | 15 | % Now check this actually matches our specific input 16 | testCase.assertThat(schema.required, ... 17 | IsSameSetAs(string(fieldnames(prototype)))); 18 | testCase.assertClass(schema.properties.name,"struct"); 19 | testCase.verifyEqual(schema.properties.name.type,"string"); 20 | testCase.verifyEqual(schema.properties.age.type,"number"); 21 | end 22 | 23 | function nonScalarFields(testCase) 24 | import matlab.unittest.constraints.IsSameSetAs 25 | 26 | prototype = struct("values",[1.2,3.4,5.67]); 27 | schema = llms.internal.jsonSchemaFromPrototype(prototype); 28 | 29 | testCase.assertClassSchema(schema); 30 | 31 | % Now check this actually matches our specific input 32 | testCase.assertThat(fieldnames(schema.properties), ... 33 | IsSameSetAs(fieldnames(prototype))); 34 | testCase.assertClass(schema.properties.values,"struct"); 35 | testCase.verifyEqual(schema.properties.values.type,"array"); 36 | testCase.verifyEqual(schema.properties.values.items, ... 
37 | struct("type","number")); 38 | end 39 | 40 | function nonScalarTopLevel(testCase) 41 | % OpenAI says the top level must have "type":"object", so we need 42 | % to wrap a nonscalar toplevel request into another object. 43 | import matlab.unittest.constraints.IsSameSetAs 44 | 45 | prototype = struct("values",[1.2,3.4,5.67]); 46 | prototype = [prototype;prototype]; 47 | schema = llms.internal.jsonSchemaFromPrototype(prototype); 48 | schema1 = llms.internal.jsonSchemaFromPrototype(prototype(1)); 49 | 50 | testCase.assertClassSchema(schema); 51 | 52 | % Now check this actually matches our specific input 53 | testCase.assertEqual(schema.required,{"result"}); 54 | testCase.assertEqual(schema.properties.result.type,"array"); 55 | testCase.assertEqual(schema.properties.result.items,schema1); 56 | end 57 | 58 | function allScalarTypes(testCase) 59 | import matlab.unittest.constraints.IsSameSetAs 60 | import matlab.unittest.constraints.HasField 61 | prototype = struct( ... 62 | "string","string", ... 63 | "cellstr",{{''}}, ... 64 | "integer",uint8(42), ... 65 | "number",4.2, ... 66 | "boolean",true, ... 67 | "object",struct("a",1), ... 68 | "array",{[1,2,3]}, ... 69 | "enum",categorical("a",["a","b","c"]), ... 70 | "missing", missing); 71 | schema = llms.internal.jsonSchemaFromPrototype(prototype); 72 | 73 | testCase.assertClassSchema(schema); 74 | 75 | testCase.assertThat(schema.required, ... 
76 | IsSameSetAs(string(fieldnames(prototype)))); 77 | testCase.verifyEqual(schema.properties.string.type,"string"); 78 | testCase.verifyThat(schema.properties.string,~HasField("enum")); 79 | testCase.verifyEqual(schema.properties.cellstr.type,"string"); 80 | testCase.verifyThat(schema.properties.cellstr,~HasField("enum")); 81 | testCase.verifyEqual(schema.properties.integer.type,"integer"); 82 | testCase.verifyEqual(schema.properties.number.type,"number"); 83 | testCase.verifyEqual(schema.properties.boolean.type,"boolean"); 84 | testCase.verifyEqual(schema.properties.object.type,"object"); 85 | testCase.verifyEqual(schema.properties.array.type,"array"); 86 | testCase.verifyEqual(schema.properties.enum.type,"string"); 87 | testCase.assertThat(schema.properties.enum,HasField("enum")); 88 | % orientation does not matter 89 | testCase.verifyEqual(schema.properties.enum.enum,{'a','b','c'}.'); 90 | testCase.verifyEqual(schema.properties.missing.type,"null"); 91 | end 92 | 93 | function userFrontend(testCase) 94 | import matlab.unittest.constraints.StartsWithSubstring 95 | import matlab.unittest.constraints.EndsWithSubstring 96 | import matlab.unittest.constraints.ContainsSubstring 97 | 98 | schema = llms.jsonSchemaFromPrototype(struct("str","","int",uint16(1))); 99 | 100 | testCase.assertClass(schema,"string"); 101 | testCase.verifyThat(schema,StartsWithSubstring("{")); 102 | testCase.verifyThat(schema,EndsWithSubstring("}")); 103 | testCase.verifyThat(schema,ContainsSubstring('"type": "integer"')); 104 | end 105 | 106 | function errors(testCase) 107 | testCase.verifyError( ... 108 | @() llms.internal.jsonSchemaFromPrototype(struct("a",datetime)), ... 
109 | "llms:unsupportedDatatypeInPrototype"); 110 | end 111 | end 112 | 113 | methods 114 | function assertClassSchema(testCase,schema) 115 | import matlab.unittest.constraints.IsSupersetOf 116 | import matlab.unittest.constraints.IsSameSetAs 117 | 118 | testCase.assertClass(schema,"struct"); 119 | % fields as required by OpenAI 120 | testCase.assertThat(fieldnames(schema),... 121 | IsSupersetOf({'type','properties','required',... 122 | 'additionalProperties'})); 123 | testCase.verifyEqual(schema.type,"object"); 124 | testCase.verifyEqual(schema.additionalProperties,false); 125 | testCase.assertClass(schema.properties,"struct"); 126 | required = schema.required; 127 | if iscell(required) 128 | required = required{1}{1}; 129 | end 130 | testCase.assertThat(string(fieldnames(schema.properties)), ... 131 | IsSameSetAs(required)); 132 | end 133 | end 134 | end 135 | -------------------------------------------------------------------------------- /tests/topenAIMessages.m: -------------------------------------------------------------------------------- 1 | classdef topenAIMessages < matlab.unittest.TestCase 2 | % Tests for openAIMessages backward compatibility function 3 | 4 | % Copyright 2023-2024 The MathWorks, Inc. 5 | 6 | methods(Test) 7 | function returnsMessageHistory(testCase) 8 | testCase.verifyClass(openAIMessages,"messageHistory"); 9 | end 10 | end 11 | end 12 | -------------------------------------------------------------------------------- /tests/tresponseStreamer.m: -------------------------------------------------------------------------------- 1 | classdef tresponseStreamer < matlab.unittest.TestCase 2 | % Tests for llms.stream.reponseStreamer 3 | % 4 | % This test file contains unit tests, with a specific focus on edge cases that 5 | % are hard to trigger in end-to-end tests. 6 | 7 | % Copyright 2024 The MathWorks, Inc. 
8 | 9 | methods (Test) 10 | function singleResponse(testCase) 11 | s = tracingStreamer; 12 | inp = 'data: {"choices":[{"content_filter_results":{},"delta":{"content":"foo","role":"assistant"}}]}'; 13 | inp = [inp newline 'data: [DONE]']; 14 | inp = unicode2native(inp,"UTF-8").'; 15 | testCase.verifyTrue(s.doPutData(inp,false)); 16 | testCase.verifyEqual(s.StreamFun(),"foo"); 17 | end 18 | 19 | function skipEmpty(testCase) 20 | s = tracingStreamer; 21 | inp = [... 22 | 'data: {"choices":[{"content_filter_results":{},"delta":{"content":"foo","role":"assistant"}}]}' newline ... 23 | 'data: {"choices":[]}' newline ... 24 | 'data: [DONE]']; 25 | inp = unicode2native(inp,"UTF-8").'; 26 | testCase.verifyTrue(s.doPutData(inp,false)); 27 | testCase.verifyEqual(s.StreamFun(),"foo"); 28 | end 29 | 30 | function splitResponse(testCase) 31 | % it can happen that the server sends packets split in the 32 | % middle of a JSON object. Hard to trigger on purpose. 33 | s = tracingStreamer; 34 | inp = 'data: {"choices":[{"content_filter_results":{},"delta":{"content":"foo","role":"assistant"}}]}'; 35 | inp = unicode2native(inp,"UTF-8").'; 36 | testCase.verifyFalse(s.doPutData(inp(1:42),false)); 37 | testCase.verifyFalse(s.doPutData(inp(43:end),false)); 38 | testCase.verifyEqual(s.StreamFun(),"foo"); 39 | end 40 | 41 | function ollamaFormat(testCase) 42 | s = tracingStreamer; 43 | inp = '{"model":"mistral","created_at":"2024-06-07T07:43:30.658793Z","message":{"role":"assistant","content":" Hello"},"done":false}'; 44 | inp = unicode2native(inp,"UTF-8").'; 45 | testCase.verifyFalse(s.doPutData(inp,false)); 46 | inp = '{"model":"mistral","created_at":"2024-06-07T07:43:30.658793Z","message":{"role":"assistant","content":" World"},"done":true}'; 47 | inp = unicode2native(inp,"UTF-8").'; 48 | testCase.verifyTrue(s.doPutData(inp,false)); 49 | testCase.verifyEqual(s.StreamFun(),[" Hello"," World"]); 50 | end 51 | 52 | function badJSON(testCase) 53 | s = tracingStreamer; 54 | inp = 'data: 
{"choices":[{"content_filter_results":{};"delta":{"content":"foo","role":"assistant"}}]}'; 55 | inp = [inp newline inp]; 56 | inp = unicode2native(inp,"UTF-8").'; 57 | testCase.verifyError(@() s.doPutData(inp,false),'llms:stream:responseStreamer:InvalidInput'); 58 | testCase.verifyEmpty(s.StreamFun()); 59 | end 60 | end 61 | end 62 | 63 | function s = tracingStreamer 64 | data = strings(1, 0); 65 | function seen = sf(str) 66 | % Append streamed text to an empty string array of length 1 67 | if nargin > 0 68 | data = [data, str]; 69 | end 70 | seen = data; 71 | end 72 | s = llms.stream.responseStreamer(@sf); 73 | end 74 | -------------------------------------------------------------------------------- /tests/tuseSameFieldTypes.m: -------------------------------------------------------------------------------- 1 | classdef tuseSameFieldTypes < matlab.unittest.TestCase 2 | % Unit tests for llms.internal.useSameFieldTypes 3 | 4 | % Copyright 2024 The MathWorks, Inc. 5 | 6 | methods(Test) 7 | function allSupportedDatatypes(testCase) 8 | % except for alternatives, because those cannot be tested with a simple verifyEqual 9 | prototype = struct(... 10 | "string", "", ... 11 | "char", {''}, ... 12 | "double", 1, ... 13 | "logical", true, ... 14 | "categorical", categorical("green",["red","green","blue","seashell"])); 15 | data = struct(... 16 | "string", {''}, ... 17 | "char", "", ... 18 | "double", uint8(1), ... 19 | "logical", 1, ... 
20 | "categorical", "green"); 21 | 22 | converted = llms.internal.useSameFieldTypes(data,prototype); 23 | testCase.verifyEqual(converted, prototype); 24 | end 25 | 26 | function arrayOfStruct(testCase) 27 | prototype = struct("a", [true, true]); 28 | data = struct("a",[1,1,0]); 29 | expected = struct("a",[true,true,false]); 30 | 31 | testCase.verifyEqual(llms.internal.useSameFieldTypes(data,prototype), expected); 32 | end 33 | 34 | function noErrors(testCase) 35 | % If the LLM sends back unexpected data, we do not want to 36 | % throw an error in the useSameFieldTypes function 37 | prototype = struct("a",1); 38 | data = struct("b",1); 39 | 40 | testCase.verifyEqual(llms.internal.useSameFieldTypes(data,prototype),data); 41 | end 42 | end 43 | 44 | end 45 | --------------------------------------------------------------------------------