Commit
Merge pull request #908 from iTzScorpions/fix-grammar-example
Fixed issue where a model with grammar constraints only generates one output in the example
martindevans authored Aug 26, 2024
2 parents e618395 + 6b171e4 commit df8cc71
Showing 1 changed file with 9 additions and 6 deletions.
15 changes: 9 additions & 6 deletions LLama.Examples/Examples/GrammarJsonResponse.cs
@@ -25,20 +25,23 @@ public static async Task Run()
         Console.WriteLine("The executor has been enabled. In this example, the LLM will follow your instructions and always respond in a JSON format. For example, you can input \"Tell me the attributes of a good dish\"");
         Console.ForegroundColor = ConsoleColor.White;
 
-        using var grammarInstance = grammar.CreateInstance();
+        var samplingPipeline = new DefaultSamplingPipeline
+        {
+            Temperature = 0.6f
+        };
+
         var inferenceParams = new InferenceParams()
         {
-            SamplingPipeline = new DefaultSamplingPipeline
-            {
-                Temperature = 0.6f,
-                Grammar = grammarInstance
-            },
+            SamplingPipeline = samplingPipeline,
             AntiPrompts = new List<string> { "Question:", "#", "Question: ", ".\n" },
             MaxTokens = 50,
         };
 
         while (true)
         {
+            using var grammarInstance = grammar.CreateInstance();
+            samplingPipeline.Grammar = grammarInstance;
+
             Console.Write("\nQuestion: ");
             Console.ForegroundColor = ConsoleColor.Green;
             var prompt = Console.ReadLine();
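For reference, here is a minimal sketch of the loop structure after this change, reassembled from the diff above. The `grammar` object and the executor call that consumes `prompt` and `inferenceParams` live elsewhere in GrammarJsonResponse.cs and are only hinted at in comments; the explanation of why the grammar instance is recreated each iteration is an inference from the commit description, not text from this page.

// Sketch of the fixed control flow (see assumptions noted above).
// `grammar` is the Grammar parsed earlier in the example (not shown in this hunk).

// Create the sampling pipeline once; it is reused for every question.
var samplingPipeline = new DefaultSamplingPipeline
{
    Temperature = 0.6f
};

var inferenceParams = new InferenceParams()
{
    SamplingPipeline = samplingPipeline,
    AntiPrompts = new List<string> { "Question:", "#", "Question: ", ".\n" },
    MaxTokens = 50,
};

while (true)
{
    // A grammar instance accumulates parse state as tokens are sampled, so an
    // instance that has already completed one answer would typically only let
    // the model stop. Creating a fresh instance per iteration keeps every
    // response grammar-constrained.
    using var grammarInstance = grammar.CreateInstance();
    samplingPipeline.Grammar = grammarInstance;

    Console.Write("\nQuestion: ");
    Console.ForegroundColor = ConsoleColor.Green;
    var prompt = Console.ReadLine();

    // ... the rest of the example feeds `prompt` to the executor together with
    // `inferenceParams` (outside this hunk) ...
}

The key design choice is to keep one DefaultSamplingPipeline for the whole session while giving it a fresh grammar instance per response, so grammar state never carries over from a completed answer.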
