diff --git a/MatGPT.mlapp b/MatGPT.mlapp index 382b98e..c785541 100644 Binary files a/MatGPT.mlapp and b/MatGPT.mlapp differ diff --git a/contents/presets.csv b/contents/presets.csv index 41c4ff2..6820b8e 100644 --- a/contents/presets.csv +++ b/contents/presets.csv @@ -2,18 +2,9 @@ name,content,prompt,model,max_tokens,temperature,test_code,suggested_questions,s AI Assistant,You are a helpful assistant. Answer as concisely as possible. ,Where is the capital of France?,gpt-3.5-turbo,1000,1,0,1,1 Read a web page,You are a helpful assistant that can read a small web page and analyze its content. The content of the page will be extracted by an external function and stored in the chat history. ,What is this page about? https://www.mathworks.com/help/matlab/text-files.html,gpt-3.5-turbo,1000,0,0,1,1 Read a local file,You are a helpful assistant that can read a small file and analyze its content. The content of the file will be extracted by an external function and stored in the chat history. ,Select a file using the paper clip icon on the left.,gpt-3.5-turbo,1000,0,0,1,1 -Understand an image,You are a helpful assistant that can see an image and analyze its content.,What is in this image? https://www.mathworks.com/help/examples/matlab/win64/DisplayGrayscaleRGBIndexedOrBinaryImageExample_04.png,gpt-4-vision-preview,1000,1,0,0,0 -Generate an image,,Create a 3D avatar of a whimsical sushi on the beach. He is decorated with various sushi elements and is playfully interacting with the beach environment.,dall-e-3,Inf,0,0,0,0 +Understand an image,You are a helpful assistant that can see an image and analyze its content.,What is in this image? https://www.mathworks.com/help/examples/matlab/win64/DisplayGrayscaleRGBIndexedOrBinaryImageExample_04.png,gpt-4-turbo,1000,1,0,1,1 +Generate an image,You are an AI assistant that can generate an image from a text prompt.,"An adventurous man clad in athletic gear is kiteboarding, harnessing the wind with his kite to skim across the waters of the famed Charles River in Boston. Nearby, a picturesque sailboat dances with the breeze. The city's skyline paints a stunning backdrop to the scene, while the surface of the river mirrors the golden hues of the setting sun. ",dall-e-3,4096,1,0,0,0 English to MATLAB Code,You are a helpful assistant that generates MATLAB code. ,"Define two random vectors x and y, fit a linear model to the data, and plot both the data and fitted line.",gpt-3.5-turbo,1000,0,1,1,1 -English to Simulink Model,"You are a helpful assistant that creates Simulink models. -You create the models by generating MATLAB code that adds all the necessary blocks and set their parameters. -Automatically arrange the blocks by adding this line of code: -Simulink.BlockDiagram.arrangeSystem(model) -Save the model and run it.","Model name: 'sine_multiplied' -Add Sine Wave block with amplification = 1. -Multiply the sine wave signal by 3. -Add Scope block with 2 input ports. -Visualize both signals by connecting them to the same Scope block. ",gpt-3.5-turbo,1000,0,1,1,1 Summarize Code,You are a friendly and helpful teaching assistant for MATLAB programmers. Analyze MATLAB code and provide concise summary of what the code does. ,"Summarize what the following code does. Code by Cecelya Blooming Light in MATLAB Mini Hack 2022 ```matlab diff --git a/helpers/MsgHelper.m b/helpers/MsgHelper.m index 68821ab..c130e2f 100644 --- a/helpers/MsgHelper.m +++ b/helpers/MsgHelper.m @@ -9,15 +9,14 @@ % provide context window for a given model % supported models models = [ ... 
- struct('name','gpt-3.5-turbo','attributes',struct('contextwindow',4096,'cutoff','Sep 2021'),'legacy',false), ... - struct('name','gpt-3.5-turbo-1106','attributes',struct('contextwindow',16385,'cutoff','Sep 2021'),'legacy',false), ... - struct('name','gpt-3.5-turbo-16k','attributes',struct('contextwindow',16385,'cutoff','Sep 2021'),'legacy',false), ... - struct('name','gpt-4','attributes',struct('contextwindow',8192,'cutoff','Sep 2021'),'legacy',false), ... - struct('name','gpt-4-0613','attributes',struct('contextwindow',8192,'cutoff','Sep 2021'),'legacy',false), ... - struct('name','gpt-4-1106-preview','attributes',struct('contextwindow',128000,'cutoff','Apr 2023'),'legacy',false), ... - struct('name','gpt-4-vision-preview','attributes',struct('contextwindow',128000,'cutoff','Apr 2023'),'legacy',false), ... - struct('name','gpt-4-turbo-preview','attributes',struct('contextwindow',128000,'cutoff','Apr 2023'),'legacy',false), ... - struct('name','dall-e-3','attributes',struct('contextwindow','n/a','cutoff','n/a'),'legacy',false), ... + struct('name','gpt-3.5-turbo','attributes',struct('contextwindow',16385,'cutoff','Sep 2021')), ... + struct('name','gpt-3.5-turbo-0125','attributes',struct('contextwindow',16385,'cutoff','Sep 2021')), ... + struct('name','gpt-3.5-turbo-1106','attributes',struct('contextwindow',16385,'cutoff','Sep 2021')), ... + struct('name','gpt-4','attributes',struct('contextwindow',8192,'cutoff','Sep 2021')), ... + struct('name','gpt-4-0613','attributes',struct('contextwindow',8192,'cutoff','Sep 2021')), ... + struct('name','gpt-4-turbo-2024-04-09','attributes',struct('contextwindow',128000,'cutoff','Dec 2023')), ... + struct('name','gpt-4-turbo','attributes',struct('contextwindow',128000,'cutoff','Dec 2023')), ... + struct('name','dall-e-3','attributes',struct('contextwindow',16385,'cutoff','Sep 2021')), ... ]; contextwindow = models(arrayfun(@(x) string(x.name), models) == modelName).attributes.contextwindow; cutoff = models(arrayfun(@(x) string(x.name), models) == modelName).attributes.cutoff; @@ -77,7 +76,8 @@ % extract content from messages contents = strings(size(messages.Messages)); - isText = ~cellfun(@(x) isempty(x.content), messages.Messages); + isText = cellfun(@(x) isstring(x.content), messages.Messages); + isText(isText) = ~cellfun(@(x) isempty(x.content), messages.Messages(isText)); % exclude text entries whose content is empty contents(isText) = cellfun(@(x) x.content, messages.Messages(isText)); isTestReport = startsWith(contents,'
'); if any(isTestReport) diff --git a/helpers/imageGenGenerate.m b/helpers/imageGenGenerate.m new file mode 100644 index 0000000..04caef7 --- /dev/null +++ b/helpers/imageGenGenerate.m @@ -0,0 +1,29 @@ +function [image,message,response] = imageGenGenerate(chat,messages) +% IMAGEGENGENERATE generates image using chat api with function calling + + % generate a response to the prompt for image generation + [txt,message,response] = generate(chat,messages); + % if tool_call is returned + if isfield(message,"tool_calls") + toolCalls = message.tool_calls; + fcn = toolCalls.function.name; + % make sure it calls for 'generateImage' + if strcmp(fcn,"generateImage") + args = jsondecode(toolCalls.function.arguments); + prompt = string(args.prompt); + % execute 'generateImage' using the provided prompt + [image,revisedPrompt,response] = generateImage(prompt); + message = struct("role","assistant","content", string(revisedPrompt)); + end + else + image = txt; + end +end + +% helper function 'generateImage' to call DALL-E 3 +function [image, revisedPrompt ,response] = generateImage(prompt) + mdl = openAIImages(ModelName="dall-e-3"); + [images, response] = generate(mdl,string(prompt)); + image = images{1}; + revisedPrompt = response.Body.Data.data.revised_prompt; +end \ No newline at end of file diff --git a/helpers/llms-with-matlab/+llms/+internal/callOpenAIChatAPI.m b/helpers/llms-with-matlab/+llms/+internal/callOpenAIChatAPI.m index 3cd485c..6226653 100644 --- a/helpers/llms-with-matlab/+llms/+internal/callOpenAIChatAPI.m +++ b/helpers/llms-with-matlab/+llms/+internal/callOpenAIChatAPI.m @@ -119,18 +119,16 @@ parameters.stream = ~isempty(nvp.StreamFun); -if ~isempty(functions) && ~strcmp(nvp.ModelName,'gpt-4-vision-preview') +if ~isempty(functions) parameters.tools = functions; end -if ~isempty(nvp.ToolChoice) && ~strcmp(nvp.ModelName,'gpt-4-vision-preview') +if ~isempty(nvp.ToolChoice) parameters.tool_choice = nvp.ToolChoice; end -if ismember(nvp.ModelName,["gpt-3.5-turbo-1106","gpt-4-1106-preview"]) - if strcmp(nvp.ResponseFormat,"json") - parameters.response_format = struct('type','json_object'); - end +if strcmp(nvp.ResponseFormat,"json") + parameters.response_format = struct('type','json_object'); end if ~isempty(nvp.Seed) @@ -142,15 +140,21 @@ dict = mapNVPToParameters; nvpOptions = keys(dict); -if strcmp(nvp.ModelName,'gpt-4-vision-preview') - nvpOptions(ismember(nvpOptions,"StopSequences")) = []; -end for opt = nvpOptions.' 
if isfield(nvp, opt) parameters.(dict(opt)) = nvp.(opt); end end + +if isempty(nvp.StopSequences) + parameters = rmfield(parameters,"stop"); +end + +if nvp.MaxNumTokens == Inf + parameters = rmfield(parameters,"max_tokens"); +end + end function dict = mapNVPToParameters() diff --git a/helpers/llms-with-matlab/+llms/+utils/errorMessageCatalog.m b/helpers/llms-with-matlab/+llms/+utils/errorMessageCatalog.m index 3791319..caf4c5e 100644 --- a/helpers/llms-with-matlab/+llms/+utils/errorMessageCatalog.m +++ b/helpers/llms-with-matlab/+llms/+utils/errorMessageCatalog.m @@ -49,8 +49,10 @@ catalog("llms:mustBeMessagesOrTxt") = "Messages must be text with one or more characters or an openAIMessages objects."; catalog("llms:invalidOptionAndValueForModel") = "'{1}' with value '{2}' is not supported for ModelName '{3}'"; catalog("llms:invalidOptionForModel") = "{1} is not supported for ModelName '{2}'"; +catalog("llms:invalidContentTypeForModel") = "{1} is not supported for ModelName '{2}'"; catalog("llms:functionNotAvailableForModel") = "This function is not supported for ModelName '{1}'"; catalog("llms:promptLimitCharacter") = "Prompt must have a maximum length of {1} characters for ModelName '{2}'"; catalog("llms:pngExpected") = "Argument must be a PNG image."; catalog("llms:warningJsonInstruction") = "When using JSON mode, you must also prompt the model to produce JSON yourself via a system or user message."; +catalog("llms:apiReturnedError") = "OpenAI API Error: {1}"; end \ No newline at end of file diff --git a/helpers/llms-with-matlab/README.md b/helpers/llms-with-matlab/README.md index b1fdcf9..a003170 100644 --- a/helpers/llms-with-matlab/README.md +++ b/helpers/llms-with-matlab/README.md @@ -5,20 +5,36 @@ This repository contains example code to demonstrate how to connect MATLAB to th The functionality shown here serves as an interface to the ChatGPT and DALL·E APIs. To start using the OpenAI APIs, you first need to obtain OpenAI API keys. You are responsible for any fees OpenAI may charge for the use of their APIs. You should be familiar with the limitations and risks associated with using this technology, and you agree that you shall be solely responsible for full compliance with any terms that may apply to your use of the OpenAI APIs. Some of the current LLMs supported are: -- gpt-3.5-turbo, gpt-3.5-turbo-1106 -- gpt-4, gpt-4-1106-preview -- gpt-4-vision-preview (a.k.a. GPT-4 Turbo with Vision) +- gpt-3.5-turbo, gpt-3.5-turbo-1106, gpt-3.5-turbo-0125 +- gpt-4-turbo, gpt-4-turbo-2024-04-09 (GPT-4 Turbo with Vision) +- gpt-4, gpt-4-0613 - dall-e-2, dall-e-3 - + For details on the specification of each model, check the official [OpenAI documentation](https://platform.openai.com/docs/models). +## Requirements + +### MathWorks Products (https://www.mathworks.com) + +- Requires MATLAB release R2024a or newer. +- Some examples require Text Analytics Toolbox™. + +### 3rd Party Products: + +- An active OpenAI API subscription and API key. 
+ ## Setup -If you would like to use this repository with MATLAB Online, simply click [![Open in MATLAB Online](https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg)](https://matlab.mathworks.com/open/github/v1?repo=matlab-deep-learning/llms-with-matlab) +### MATLAB Online + +To use this repository with MATLAB Online, click [![Open in MATLAB Online](https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg)](https://matlab.mathworks.com/open/github/v1?repo=matlab-deep-learning/llms-with-matlab) + + +### MATLAB Desktop -If you would like to use it with MATLAB Desktop, proceed with the following steps: +To use this repository with a local installation of MATLAB, first clone the repository. -1. Clone the repository to your local machine. +1. In the system command prompt, run: ```bash git clone https://github.com/matlab-deep-learning/llms-with-matlab.git ``` @@ -32,26 +48,19 @@ If you would like to use it with MATLAB Desktop, proceed with the following step addpath('path/to/llms-with-matlab'); ``` -4. Set up your OpenAI API key. Create a `.env` file in the project root directory with the following content. +### Setting up your API key - ``` - OPENAI_API_KEY= - ``` - - Then load your `.env` file as follows: +Set up your OpenAI API key. Create a `.env` file in the project root directory with the following content. - ```matlab - loadenv(".env") - ``` - -### MathWorks Products (https://www.mathworks.com) - -- Requires MATLAB release R2023a or newer. - -### 3rd Party Products: - -- An active OpenAI API subscription and API key. +``` +OPENAI_API_KEY= +``` + +Then load your `.env` file as follows: +```matlab +loadenv(".env") +``` ## Getting Started with Chat Completion API @@ -278,13 +287,13 @@ You can extract the arguments and write the data to a table, for example. ### Understand the content of an image -You can use gpt-4-vision-preview to experiment with image understanding. +You can use gpt-4-turbo to experiment with image understanding. ```matlab -chat = openAIChat("You are an AI assistant.", ModelName="gpt-4-vision-preview"); +chat = openAIChat("You are an AI assistant.", ModelName="gpt-4-turbo"); image_path = "peppers.png"; messages = openAIMessages; messages = addUserMessageWithImages(messages,"What is in the image?",image_path); -[txt,response] = generate(chat,messages); +[txt,response] = generate(chat,messages,MaxNumTokens=4096); % Should output the description of the image ``` @@ -320,15 +329,16 @@ imshow(images{1}) ## Examples To learn how to use this in your workflows, see [Examples](/examples/). -- [ExampleStreaming.mlx](/examples/ExampleStreaming.mlx): Learn to implement a simple chat that stream the response. -- [ExampleSummarization.mlx](/examples/ExampleSummarization.mlx): Learn to create concise summaries of long texts with ChatGPT. (Requires Text Analytics Toolbox™) -- [ExampleChatBot.mlx](/examples/ExampleChatBot.mlx): Build a conversational chatbot capable of handling various dialogue scenarios using ChatGPT. (Requires Text Analytics Toolbox) -- [ExampleFunctionCalling.mlx](/examples/ExampleFunctionCalling.mlx): Learn how to create agents capable of executing MATLAB functions. +- [ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mlx](/examples/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mlx): Learn to implement a simple chat that streams the response. 
+- [SummarizeLargeDocumentsUsingChatGPTandMATLAB.mlx](/examples/SummarizeLargeDocumentsUsingChatGPTandMATLAB.mlx): Learn to create concise summaries of long texts with ChatGPT. (Requires Text Analytics Toolbox™) +- [CreateSimpleChatBot.mlx](/examples/CreateSimpleChatBot.mlx): Build a conversational chatbot capable of handling various dialogue scenarios using ChatGPT. (Requires Text Analytics Toolbox) +- [AnalyzeScientificPapersUsingFunctionCalls.mlx](/examples/AnalyzeScientificPapersUsingFunctionCalls.mlx): Learn how to create agents capable of executing MATLAB functions. - [ExampleParallelFunctionCalls.mlx](/examples/ExampleParallelFunctionCalls.mlx): Learn how to take advantage of parallel function calling. -- [ExampleRetrievalAugmentedGeneration.mlx](/examples/ExampleRetrievalAugmentedGeneration.mlx): Learn about retrieval augmented generation with a simple use case. (Requires Text Analytics Toolbox™) -- [ExampleGPT4Vision.mlx](/examples/ExampleGPT4Vision.mlx): Learn how to use GPT-4 Turbo with Vision to understand the content of an image. -- [ExampleJSONMode.mlx](/examples/ExampleJSONMode.mlx): Learn how to use JSON mode in chat completions -- [ExampleDALLE.mlx](/examples/ExampleDALLE.mlx): Learn how to generate images, create variations and edit the images. +- [RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mlx](/examples/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mlx): Learn about retrieval augmented generation with a simple use case. (Requires Text Analytics Toolbox™) +- [DescribeImagesUsingChatGPT.mlx](/examples/DescribeImagesUsingChatGPT.mlx): Learn how to use GPT-4 Turbo with Vision to understand the content of an image. +- [AnalyzeSentimentinTextUsingChatGPTinJSONMode.mlx](/examples/AnalyzeSentimentinTextUsingChatGPTinJSONMode.mlx): Learn how to use JSON mode in chat completions. +- [UsingDALLEToEditImages.mlx](/examples/UsingDALLEToEditImages.mlx): Learn how to edit images and create variations of images. +- [UsingDALLEToGenerateImages.mlx](/examples/UsingDALLEToGenerateImages.mlx): Learn how to generate images. 
## License diff --git a/helpers/llms-with-matlab/examples/AnalyzeScientificPapersUsingFunctionCalls.mlx b/helpers/llms-with-matlab/examples/AnalyzeScientificPapersUsingFunctionCalls.mlx new file mode 100644 index 0000000..ddf2130 Binary files /dev/null and b/helpers/llms-with-matlab/examples/AnalyzeScientificPapersUsingFunctionCalls.mlx differ diff --git a/helpers/llms-with-matlab/examples/AnalyzeSentimentinTextUsingChatGPTinJSONMode.mlx b/helpers/llms-with-matlab/examples/AnalyzeSentimentinTextUsingChatGPTinJSONMode.mlx new file mode 100644 index 0000000..8fc5a63 Binary files /dev/null and b/helpers/llms-with-matlab/examples/AnalyzeSentimentinTextUsingChatGPTinJSONMode.mlx differ diff --git a/helpers/llms-with-matlab/examples/CreateSimpleChatBot.mlx b/helpers/llms-with-matlab/examples/CreateSimpleChatBot.mlx new file mode 100644 index 0000000..b83f547 Binary files /dev/null and b/helpers/llms-with-matlab/examples/CreateSimpleChatBot.mlx differ diff --git a/helpers/llms-with-matlab/examples/DescribeImagesUsingChatGPT.mlx b/helpers/llms-with-matlab/examples/DescribeImagesUsingChatGPT.mlx new file mode 100644 index 0000000..b55f4a2 Binary files /dev/null and b/helpers/llms-with-matlab/examples/DescribeImagesUsingChatGPT.mlx differ diff --git a/helpers/llms-with-matlab/examples/ExampleParallelFunctionCalls.mlx b/helpers/llms-with-matlab/examples/ExampleParallelFunctionCalls.mlx index 11ab88c..94f45ba 100644 Binary files a/helpers/llms-with-matlab/examples/ExampleParallelFunctionCalls.mlx and b/helpers/llms-with-matlab/examples/ExampleParallelFunctionCalls.mlx differ diff --git a/helpers/llms-with-matlab/examples/InformationRetrievalUsingOpenAIDocumentEmbedding.mlx b/helpers/llms-with-matlab/examples/InformationRetrievalUsingOpenAIDocumentEmbedding.mlx new file mode 100644 index 0000000..7768de1 Binary files /dev/null and b/helpers/llms-with-matlab/examples/InformationRetrievalUsingOpenAIDocumentEmbedding.mlx differ diff --git a/helpers/llms-with-matlab/examples/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mlx b/helpers/llms-with-matlab/examples/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mlx new file mode 100644 index 0000000..c74836b Binary files /dev/null and b/helpers/llms-with-matlab/examples/ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode.mlx differ diff --git a/helpers/llms-with-matlab/examples/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mlx b/helpers/llms-with-matlab/examples/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mlx new file mode 100644 index 0000000..1622a21 Binary files /dev/null and b/helpers/llms-with-matlab/examples/RetrievalAugmentedGenerationUsingChatGPTandMATLAB.mlx differ diff --git a/helpers/llms-with-matlab/examples/SummarizeLargeDocumentsUsingChatGPTandMATLAB.mlx b/helpers/llms-with-matlab/examples/SummarizeLargeDocumentsUsingChatGPTandMATLAB.mlx new file mode 100644 index 0000000..daa8cbf Binary files /dev/null and b/helpers/llms-with-matlab/examples/SummarizeLargeDocumentsUsingChatGPTandMATLAB.mlx differ diff --git a/helpers/llms-with-matlab/examples/UsingDALLEToEditImages.mlx b/helpers/llms-with-matlab/examples/UsingDALLEToEditImages.mlx new file mode 100644 index 0000000..b01e891 Binary files /dev/null and b/helpers/llms-with-matlab/examples/UsingDALLEToEditImages.mlx differ diff --git a/helpers/llms-with-matlab/examples/UsingDALLEToGenerateImages.mlx b/helpers/llms-with-matlab/examples/UsingDALLEToGenerateImages.mlx new file mode 100644 index 0000000..f0ce158 Binary files /dev/null and 
b/helpers/llms-with-matlab/examples/UsingDALLEToGenerateImages.mlx differ diff --git a/helpers/llms-with-matlab/openAIChat.m b/helpers/llms-with-matlab/openAIChat.m index aced1cb..395d166 100644 --- a/helpers/llms-with-matlab/openAIChat.m +++ b/helpers/llms-with-matlab/openAIChat.m @@ -114,10 +114,10 @@ arguments systemPrompt {llms.utils.mustBeTextOrEmpty} = [] nvp.Tools (1,:) {mustBeA(nvp.Tools, "openAIFunction")} = openAIFunction.empty - nvp.ModelName (1,1) {mustBeMember(nvp.ModelName,["gpt-4", "gpt-4-0613", "gpt-4-32k", ... - "gpt-3.5-turbo", "gpt-3.5-turbo-16k",... - "gpt-4-1106-preview","gpt-3.5-turbo-1106", ... - "gpt-4-vision-preview", "gpt-4-turbo-preview"])} = "gpt-3.5-turbo" + nvp.ModelName (1,1) {mustBeMember(nvp.ModelName,["gpt-4-turbo", ... + "gpt-4-turbo-2024-04-09","gpt-4","gpt-4-0613", ... + "gpt-3.5-turbo","gpt-3.5-turbo-0125", ... + "gpt-3.5-turbo-1106"])} = "gpt-3.5-turbo" nvp.Temperature {mustBeValidTemperature} = 1 nvp.TopProbabilityMass {mustBeValidTopP} = 1 nvp.StopSequences {mustBeValidStop} = {} @@ -131,10 +131,6 @@ if isfield(nvp,"StreamFun") this.StreamFun = nvp.StreamFun; - if strcmp(nvp.ModelName,'gpt-4-vision-preview') - error("llms:invalidOptionForModel", ... - llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "StreamFun", nvp.ModelName)); - end else this.StreamFun = []; end @@ -146,10 +142,6 @@ else this.Tools = nvp.Tools; [this.FunctionsStruct, this.FunctionNames] = functionAsStruct(nvp.Tools); - if strcmp(nvp.ModelName,'gpt-4-vision-preview') - error("llms:invalidOptionForModel", ... - llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "Tools", nvp.ModelName)); - end end if ~isempty(systemPrompt) @@ -163,20 +155,15 @@ this.Temperature = nvp.Temperature; this.TopProbabilityMass = nvp.TopProbabilityMass; this.StopSequences = nvp.StopSequences; - if ~isempty(nvp.StopSequences) && strcmp(nvp.ModelName,'gpt-4-vision-preview') - error("llms:invalidOptionForModel", ... - llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "StopSequences", nvp.ModelName)); - end - % ResponseFormat is only supported in the latest models only if (nvp.ResponseFormat == "json") - if ismember(this.ModelName,["gpt-3.5-turbo-1106","gpt-4-1106-preview"]) - warning("llms:warningJsonInstruction", ... - llms.utils.errorMessageCatalog.getMessage("llms:warningJsonInstruction")) - else + if ismember(this.ModelName,["gpt-4","gpt-4-0613"]) error("llms:invalidOptionAndValueForModel", ... llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionAndValueForModel", "ResponseFormat", "json", this.ModelName)); + else + warning("llms:warningJsonInstruction", ... + llms.utils.errorMessageCatalog.getMessage("llms:warningJsonInstruction")) end end @@ -222,10 +209,6 @@ end toolChoice = convertToolChoice(this, nvp.ToolChoice); - if ~isempty(nvp.ToolChoice) && strcmp(this.ModelName,'gpt-4-vision-preview') - error("llms:invalidOptionForModel", ... - llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "ToolChoice", this.ModelName)); - end if isstring(messages) && isscalar(messages) messagesStruct = {struct("role", "user", "content", messages)}; @@ -233,6 +216,13 @@ messagesStruct = messages.Messages; end + if iscell(messagesStruct{end}.content) && any(cellfun(@(x) isfield(x,"image_url"), messagesStruct{end}.content)) + if ~ismember(this.ModelName,["gpt-4-turbo","gpt-4-turbo-2024-04-09"]) + error("llms:invalidContentTypeForModel", ... 
+ llms.utils.errorMessageCatalog.getMessage("llms:invalidContentTypeForModel", "Image content", this.ModelName)); + end + end + if ~isempty(this.SystemPrompt) messagesStruct = horzcat(this.SystemPrompt, messagesStruct); end @@ -244,6 +234,13 @@ PresencePenalty=this.PresencePenalty, FrequencyPenalty=this.FrequencyPenalty, ... ResponseFormat=this.ResponseFormat,Seed=nvp.Seed, ... ApiKey=this.ApiKey,TimeOut=this.TimeOut, StreamFun=this.StreamFun); + + if isfield(response.Body.Data,"error") + err = response.Body.Data.error.message; + text = llms.utils.errorMessageCatalog.getMessage("llms:apiReturnedError",err); + message = struct("role","assistant","content",text); + end + end function this = set.Temperature(this, temperature)
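For reference, the sketch below shows one way the image-generation path added in this diff (`imageGenGenerate` together with `openAIChat`, `openAIFunction`, and `openAIImages` from llms-with-matlab) might be wired up. It is a minimal sketch, not code taken from this change: the tool description, parameter description, system prompt, and user prompt are illustrative assumptions, and MatGPT performs the equivalent setup internally.

```matlab
% Minimal usage sketch (assumed wiring; MatGPT builds these objects itself).
% Register a 'generateImage' tool so the chat model can request DALL-E 3.
fcn = openAIFunction("generateImage", ...
    "Generate an image from a text prompt.");                  % assumed description
fcn = addParameter(fcn, "prompt", type="string", ...
    description="Text prompt describing the desired image.");  % assumed description

chat = openAIChat("You are an AI assistant that can generate an image from a text prompt.", ...
    Tools=fcn, ModelName="gpt-3.5-turbo");

messages = openAIMessages;
messages = addUserMessage(messages, ...
    "A sailboat on the Charles River at sunset.");              % example prompt

% If the model emits a 'generateImage' tool call, imageGenGenerate runs
% DALL-E 3 via openAIImages and returns the generated image; otherwise it
% returns the plain text reply in 'image'.
[image, message, response] = imageGenGenerate(chat, messages);
```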