OpenAI-DotNet 7.0.5 (#135)
- Fixed Message.Content serialization in Role.Function message history
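For reference, here is a minimal standalone sketch (not part of this commit) of the history entry the fix targets: when a function result is replayed to the chat endpoint, the function-role message should carry both the function's name and its result as content. The field names follow OpenAI's function-calling chat format; the anonymous type and values are invented for illustration.

```csharp
using System;
using System.Text.Json;

// Hypothetical stand-in for the serialized history entry; in the library it is the
// Message class (patched below) that actually produces this shape.
var functionMessage = new
{
    role = "function",
    name = "GetCurrentWeather",                          // which function produced the result
    content = "22 celsius and partly cloudy in Glasgow"  // the function's return value
};

// Prints roughly:
// {"role":"function","name":"GetCurrentWeather","content":"22 celsius and partly cloudy in Glasgow"}
Console.WriteLine(JsonSerializer.Serialize(functionMessage));
```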

---------

Co-authored-by: nick-zocdoc <[email protected]>
Co-authored-by: Nick Ganju <[email protected]>
3 people authored Aug 10, 2023
1 parent 629c5b9 commit 0f6359c
Showing 4 changed files with 22 additions and 17 deletions.
OpenAI-DotNet-Tests/TestFixture_03_Chat.cs (23 changes: 13 additions & 10 deletions)
@@ -157,7 +157,7 @@ public async Task Test_4_GetChatFunctionCompletion()
})
};

- var chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo-0613");
+ var chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo");
var result = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest);
Assert.IsNotNull(result);
Assert.IsNotNull(result.Choices);
@@ -169,7 +169,7 @@ public async Task Test_4_GetChatFunctionCompletion()
var locationMessage = new Message(Role.User, "I'm in Glasgow, Scotland");
messages.Add(locationMessage);
Console.WriteLine($"{locationMessage.Role}: {locationMessage.Content}");
- chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo-0613");
+ chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo");
result = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest);

Assert.IsNotNull(result);
@@ -184,7 +184,7 @@ public async Task Test_4_GetChatFunctionCompletion()
var unitMessage = new Message(Role.User, "celsius");
messages.Add(unitMessage);
Console.WriteLine($"{unitMessage.Role}: {unitMessage.Content}");
- chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo-0613");
+ chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo");
result = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest);
Assert.IsNotNull(result);
Assert.IsNotNull(result.Choices);
@@ -198,8 +198,11 @@ public async Task Test_4_GetChatFunctionCompletion()
var functionArgs = JsonSerializer.Deserialize<WeatherArgs>(result.FirstChoice.Message.Function.Arguments.ToString());
var functionResult = WeatherService.GetCurrentWeather(functionArgs);
Assert.IsNotNull(functionResult);
- messages.Add(new Message(Role.Function, functionResult));
+ messages.Add(new Message(Role.Function, functionResult, nameof(WeatherService.GetCurrentWeather)));
Console.WriteLine($"{Role.Function}: {functionResult}");
+ chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo-0613");
+ result = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest);
+ Console.WriteLine(result);
}

[Test]
@@ -241,7 +244,7 @@ public async Task Test_5_GetChatFunctionCompletion_Streaming()
})
};

- var chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo-0613");
+ var chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo");
var result = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse =>
{
Assert.IsNotNull(partialResponse);
@@ -266,7 +269,7 @@ public async Task Test_5_GetChatFunctionCompletion_Streaming()
var locationMessage = new Message(Role.User, "I'm in Glasgow, Scotland");
messages.Add(locationMessage);
Console.WriteLine($"{locationMessage.Role}: {locationMessage.Content}");
- chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo-0613");
+ chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo");
result = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse =>
{
Assert.IsNotNull(partialResponse);
@@ -295,7 +298,7 @@ public async Task Test_5_GetChatFunctionCompletion_Streaming()
var unitMessage = new Message(Role.User, "celsius");
messages.Add(unitMessage);
Console.WriteLine($"{unitMessage.Role}: {unitMessage.Content}");
- chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo-0613");
+ chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo");
result = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse =>
{
Assert.IsNotNull(partialResponse);
@@ -325,7 +328,7 @@ public async Task Test_5_GetChatFunctionCompletion_Streaming()
var functionArgs = JsonSerializer.Deserialize<WeatherArgs>(result.FirstChoice.Message.Function.Arguments.ToString());
var functionResult = WeatherService.GetCurrentWeather(functionArgs);
Assert.IsNotNull(functionResult);
- messages.Add(new Message(Role.Function, functionResult));
+ messages.Add(new Message(Role.Function, functionResult, nameof(WeatherService.GetCurrentWeather)));
Console.WriteLine($"{Role.Function}: {functionResult}");
}

@@ -369,7 +372,7 @@ public async Task Test_6_GetChatFunctionForceCompletion()
})
};

- var chatRequest = new ChatRequest(messages, functions: functions, functionCall: null, model: "gpt-3.5-turbo-0613");
+ var chatRequest = new ChatRequest(messages, functions: functions, functionCall: null, model: "gpt-3.5-turbo");
var result = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest);
Assert.IsNotNull(result);
Assert.IsNotNull(result.Choices);
@@ -400,7 +403,7 @@ public async Task Test_6_GetChatFunctionForceCompletion()
var functionArgs = JsonSerializer.Deserialize<WeatherArgs>(result.FirstChoice.Message.Function.Arguments.ToString());
var functionResult = WeatherService.GetCurrentWeather(functionArgs);
Assert.IsNotNull(functionResult);
- messages.Add(new Message(Role.Function, functionResult));
+ messages.Add(new Message(Role.Function, functionResult, nameof(WeatherService.GetCurrentWeather)));
Console.WriteLine($"{Role.Function}: {functionResult}");
}
}
OpenAI-DotNet/Chat/Message.cs (2 changes: 1 addition & 1 deletion)
@@ -45,7 +45,7 @@ public Message(Role role, string content, string name = null, Function function
/// </summary>
[JsonInclude]
[JsonPropertyName("content")]
- [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
+ [JsonIgnore(Condition = JsonIgnoreCondition.Never)]
public string Content { get; private set; }

/// <summary>
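The single-line change above is the core of the fix: with `JsonIgnoreCondition.WhenWritingDefault`, System.Text.Json drops the `content` property whenever the string is null, so a replayed message with no content loses the field entirely; with `JsonIgnoreCondition.Never` the property is always written, even as null. A self-contained sketch using hypothetical stand-in types (not the library's `Message` class) to show the difference:

```csharp
using System;
using System.Text.Json;
using System.Text.Json.Serialization;

public sealed class MsgWhenWritingDefault
{
    [JsonPropertyName("content")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
    public string Content { get; set; }
}

public sealed class MsgNever
{
    [JsonPropertyName("content")]
    [JsonIgnore(Condition = JsonIgnoreCondition.Never)]
    public string Content { get; set; }
}

public static class Demo
{
    public static void Main()
    {
        // Old behaviour: a null Content is omitted entirely -> {}
        Console.WriteLine(JsonSerializer.Serialize(new MsgWhenWritingDefault()));

        // New behaviour: the property is always written -> {"content":null}
        Console.WriteLine(JsonSerializer.Serialize(new MsgNever()));
    }
}
```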
OpenAI-DotNet/OpenAI-DotNet.csproj (6 changes: 4 additions & 2 deletions)
@@ -17,8 +17,10 @@ More context [on Roger Pincombe's blog](https://rogerpincombe.com/openai-dotnet-
<RepositoryUrl>https://github.com/RageAgainstThePixel/OpenAI-DotNet</RepositoryUrl>
<PackageTags>OpenAI, AI, ML, API, gpt-4, gpt-3.5-tubo, gpt-3, chatGPT, chat-gpt, gpt-2, gpt</PackageTags>
<Title>OpenAI API</Title>
- <Version>7.0.4</Version>
- <PackageReleaseNotes>Version 7.0.4
+ <Version>7.0.5</Version>
+ <PackageReleaseNotes>Version 7.0.5
+ - Fixed Message.Content serialization in Role.Function message history
+ Version 7.0.4
- Fixed ChatRequest forced function calls
Version 7.0.3
- Fixed chat streaming message copy from delta
README.md (8 changes: 4 additions & 4 deletions)
@@ -467,7 +467,7 @@ var functions = new List<Function>
})
};

- var chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo-0613");
+ var chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo");
var result = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest);
messages.Add(result.FirstChoice.Message);

@@ -476,7 +476,7 @@ Console.WriteLine($"{result.FirstChoice.Message.Role}: {result.FirstChoice.Messa
var locationMessage = new Message(Role.User, "I'm in Glasgow, Scotland");
messages.Add(locationMessage);
Console.WriteLine($"{locationMessage.Role}: {locationMessage.Content}");
- chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo-0613");
+ chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo");
result = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest);

messages.Add(result.FirstChoice.Message);
@@ -488,15 +488,15 @@ if (!string.IsNullOrEmpty(result.FirstChoice.Message.Content))
var unitMessage = new Message(Role.User, "celsius");
messages.Add(unitMessage);
Console.WriteLine($"{unitMessage.Role}: {unitMessage.Content}");
- chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo-0613");
+ chatRequest = new ChatRequest(messages, functions: functions, functionCall: "auto", model: "gpt-3.5-turbo");
result = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest);
}

Console.WriteLine($"{result.FirstChoice.Message.Role}: {result.FirstChoice.Message.Function.Name} | Finish Reason: {result.FirstChoice.FinishReason}");
Console.WriteLine($"{result.FirstChoice.Message.Function.Arguments}");
var functionArgs = JsonSerializer.Deserialize<WeatherArgs>(result.FirstChoice.Message.Function.Arguments.ToString());
var functionResult = WeatherService.GetCurrentWeather(functionArgs);
- messages.Add(new Message(Role.Function, functionResult));
+ messages.Add(new Message(Role.Function, functionResult, nameof(WeatherService.GetCurrentWeather)));
Console.WriteLine($"{Role.Function}: {functionResult}");
// System: You are a helpful weather assistant.
// User: What's the weather like today?
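The `WeatherArgs` type used in the README example is defined in the test project and does not appear in this diff. The sketch below is a hypothetical stand-in (the `location` and `unit` property names are assumed from the conversation in the example, not taken from the real class) showing how the function-call arguments deserialize into a typed object:

```csharp
using System;
using System.Text.Json;
using System.Text.Json.Serialization;

// Assumed shape only; the real WeatherArgs lives alongside WeatherService in the tests.
public sealed class WeatherArgs
{
    [JsonPropertyName("location")]
    public string Location { get; set; }

    [JsonPropertyName("unit")]
    public string Unit { get; set; }
}

public static class ArgsDemo
{
    public static void Main()
    {
        // A sample arguments payload like the one the model returns for the weather function.
        const string arguments = "{\"location\":\"Glasgow, Scotland\",\"unit\":\"celsius\"}";
        var args = JsonSerializer.Deserialize<WeatherArgs>(arguments);
        Console.WriteLine($"{args.Location} / {args.Unit}");
    }
}
```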
