Skip to content

Commit

Permalink
Ensure Ollama streaming updates specify a CompletionId. (#5795)
Browse files Browse the repository at this point in the history
  • Loading branch information
eiriktsarpalis authored Jan 14, 2025
1 parent 5b76196 commit b26772a
Show file tree
Hide file tree
Showing 4 changed files with 7 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,7 @@ public async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAs

StreamingChatCompletionUpdate update = new()
{
CompletionId = chunk.CreatedAt,
Role = chunk.Message?.Role is not null ? new ChatRole(chunk.Message.Role) : null,
CreatedAt = DateTimeOffset.TryParse(chunk.CreatedAt, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTimeOffset createdAt) ? createdAt : null,
FinishReason = ToFinishReason(chunk),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
Expand Down Expand Up @@ -54,7 +55,7 @@ public static OpenAI.Chat.ChatCompletion ToOpenAIChatCompletion(ChatCompletion c
}

return OpenAIChatModelFactory.ChatCompletion(
id: chatCompletion.CompletionId,
id: chatCompletion.CompletionId ?? CreateCompletionId(),
model: chatCompletion.ModelId,
createdAt: chatCompletion.CreatedAt ?? default,
role: ToOpenAIChatRole(chatCompletion.Message.Role).Value,
Expand Down Expand Up @@ -584,6 +585,8 @@ private static FunctionCallContent ParseCallContentFromBinaryData(BinaryData ut8
/// <summary>Looks up <paramref name="key"/> in <paramref name="dict"/>, returning the typed value if present or <see langword="default"/> otherwise (also when the dictionary itself is <see langword="null"/>).</summary>
private static T? GetValueOrDefault<T>(this AdditionalPropertiesDictionary? dict, string key)
{
    // Null dictionary and missing/mistyped key both fall through to default.
    if (dict is not null && dict.TryGetValue(key, out T? value))
    {
        return value;
    }

    return default;
}

/// <summary>Fabricates an OpenAI-style completion ID ("chatcmpl-" followed by a 32-hex-digit GUID) for updates that lack one.</summary>
/// <remarks>The "N" GUID format is culture-invariant, so no format provider is needed.</remarks>
private static string CreateCompletionId() => $"chatcmpl-{Guid.NewGuid():N}";

/// <summary>Used to create the JSON payload for an OpenAI chat tool description.</summary>
public sealed class OpenAIChatToolJson
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ internal static partial class OpenAIModelMappers
}

yield return OpenAIChatModelFactory.StreamingChatCompletionUpdate(
completionId: chatCompletionUpdate.CompletionId,
completionId: chatCompletionUpdate.CompletionId ?? CreateCompletionId(),
model: chatCompletionUpdate.ModelId,
createdAt: chatCompletionUpdate.CreatedAt ?? default,
role: ToOpenAIChatRole(chatCompletionUpdate.Role),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -185,6 +185,7 @@ public async Task BasicRequestResponse_Streaming()

for (int i = 0; i < updates.Count; i++)
{
Assert.NotNull(updates[i].CompletionId);
Assert.Equal(i < updates.Count - 1 ? 1 : 2, updates[i].Contents.Count);
Assert.Equal(ChatRole.Assistant, updates[i].Role);
Assert.Equal("llama3.1", updates[i].ModelId);
Expand Down

0 comments on commit b26772a

Please sign in to comment.