
Commit d72985d
chore: update prompt type for more granular configuration
Signed-off-by: Donnie Adams <[email protected]>
thedadams committed Feb 4, 2025
1 parent d9098db commit d72985d
Showing 2 changed files with 34 additions and 14 deletions.
18 changes: 17 additions & 1 deletion gptscript/frame.py

@@ -194,13 +194,25 @@ def __init__(self,
         self.llmResponse = llmResponse


+class PromptField:
+    def __init__(self,
+                 name: str = "",
+                 description: str = "",
+                 sensitive: bool | None = None,
+                 **kwargs,
+                 ):
+        self.name = name
+        self.description = description
+        self.sensitive = sensitive
+
+
 class PromptFrame:
     def __init__(self,
                  id: str = "",
                  type: RunEventType = RunEventType.prompt,
                  time: str = "",
                  message: str = "",
-                 fields: list[str] = None,
+                 fields: list[PromptField] = None,
                  metadata: dict[str, str] = None,
                  sensitive: bool = False,
                  **kwargs,
@@ -209,6 +221,10 @@ def __init__(self,
         self.time = time
         self.message = message
         self.fields = fields
+        if self.fields is not None:
+            for i in range(len(self.fields)):
+                if isinstance(self.fields[i], dict):
+                    self.fields[i] = PromptField(**self.fields[i])
         self.metadata = metadata
         self.sensitive = sensitive
         self.type = type
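The practical effect of the gptscript/frame.py change for SDK consumers: prompt events now carry structured PromptField objects instead of bare field-name strings, and PromptFrame normalizes dict payloads into PromptField instances on construction. A minimal sketch of the new shape, using only what the diff above shows; the example values are made up, and the PromptResponse line mirrored from the tests below is shown as a comment.

# Sketch of the new prompt-field shape introduced by this commit; values are illustrative.
from gptscript.frame import PromptField, PromptFrame, RunEventType

# PromptFrame.__init__ now converts dict entries in `fields` into PromptField objects.
frame = PromptFrame(
    id="prompt-1",
    type=RunEventType.prompt,
    message="What is your first name?",
    fields=[{"name": "first name", "description": "the user's first name", "sensitive": False}],
)

field = frame.fields[0]
assert isinstance(field, PromptField)
print(field.name, field.description, field.sensitive)

# Handlers that previously keyed replies on the string frame.fields[0] should now
# key them on frame.fields[0].name, as the updated tests do:
#   await gptscript.prompt(PromptResponse(frame.id, {field.name: "Clicky"}))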
30 changes: 17 additions & 13 deletions tests/test_gptscript.py

@@ -546,17 +546,21 @@ async def process_event(r: Run, frame: CallFrame | RunFrame | PromptFrame):
                 event_output += output.content
         elif frame.type == RunEventType.callFinish:
             call_finish_seen = True
+            for output in frame.output:
+                event_output += output.content

-    run = gptscript.run(os.getcwd() + "/tests/fixtures/global-tools.gpt",
+    cwd = os.getcwd().removesuffix("/tests")
+    run = gptscript.run(cwd + "/tests/fixtures/global-tools.gpt",
                         Options(
                             disableCache=True,
                             credentialOverrides=["github.com/gptscript-ai/gateway:OPENAI_API_KEY"],
                         ),
                         event_handlers=[process_event],
                         )

-    assert "Hello!" in await run.text(), "Unexpected output from global tool test"
-    assert "Hello" in event_output, "Unexpected stream output from global tool test"
+    output = await run.text()
+    assert "Hello!" in output, "Unexpected output from global tool test: " + output
+    assert "Hello" in event_output, "Unexpected stream output from global tool test: " + event_output

     assert run_start_seen and call_start_seen and call_progress_seen and call_finish_seen and run_finish_seen, \
         f"One of these is False: {run_start_seen}, {call_start_seen}, {call_progress_seen}, {call_finish_seen}, {run_finish_seen}"
@@ -573,7 +577,7 @@ async def process_event(r: Run, frame: CallFrame | RunFrame | PromptFrame):
             confirm_event_found = True
             assert '"ls' in frame.input or '"dir' in frame.input, "Unexpected confirm input: " + frame.input
             await gptscript.confirm(AuthResponse(frame.id, True))
-        elif frame.type == RunEventType.callProgress:
+        elif frame.type == RunEventType.callProgress or frame.type == RunEventType.callFinish:
             for output in frame.output:
                 event_content += output.content

@@ -610,7 +614,7 @@ async def process_event(r: Run, frame: CallFrame | RunFrame | PromptFrame):
             confirm_event_found = True
             assert '"ls"' in frame.input, "Unexpected confirm input: " + frame.input
             await gptscript.confirm(AuthResponse(frame.id, False, "I will not allow it!"))
-        elif frame.type == RunEventType.callProgress:
+        elif frame.type == RunEventType.callProgress or frame.type == RunEventType.callFinish:
             for output in frame.output:
                 event_content += output.content

@@ -637,9 +641,9 @@ async def process_event(r: Run, frame: CallFrame | RunFrame | PromptFrame):
         if frame.type == RunEventType.prompt:
             prompt_event_found = True
             assert len(frame.fields) == 1, "Unexpected number of fields: " + str(frame.fields)
-            assert "first name" in frame.fields[0], "Unexpected field: " + frame.fields[0]
-            await gptscript.prompt(PromptResponse(frame.id, {frame.fields[0]: "Clicky"}))
-        elif frame.type == RunEventType.callProgress:
+            assert "first name" in frame.fields[0].name, "Unexpected field: " + frame.fields[0].name
+            await gptscript.prompt(PromptResponse(frame.id, {frame.fields[0].name: "Clicky"}))
+        elif frame.type == RunEventType.callProgress or frame.type == RunEventType.callFinish:
             for output in frame.output:
                 event_content += output.content

@@ -667,10 +671,10 @@ async def process_event(r: Run, frame: CallFrame | RunFrame | PromptFrame):
         if frame.type == RunEventType.prompt:
             prompt_event_found = True
             assert len(frame.fields) == 1, "Unexpected number of fields: " + str(frame.fields)
-            assert "first name" in frame.fields[0], "Unexpected field: " + frame.fields[0]
+            assert "first name" in frame.fields[0].name, "Unexpected field: " + frame.fields[0].name
             assert "first_name" in frame.metadata, "Unexpected metadata: " + str(frame.metadata)
             assert frame.metadata["first_name"] == "Clicky", "Unexpected metadata: " + str(frame.metadata)
-            await gptscript.prompt(PromptResponse(frame.id, {frame.fields[0]: "Clicky"}))
+            await gptscript.prompt(PromptResponse(frame.id, {frame.fields[0].name: "Clicky"}))

     out = await gptscript.run(
         "sys.prompt",
@@ -691,8 +695,8 @@ async def process_event(r: Run, frame: CallFrame | RunFrame | PromptFrame):
         if frame.type == RunEventType.prompt:
             prompt_event_found = True
             assert len(frame.fields) == 1, "Unexpected number of fields: " + str(frame.fields)
-            assert "first name" in frame.fields[0], "Unexpected field: " + frame.fields[0]
-            await gptscript.prompt(PromptResponse(frame.id, {frame.fields[0]: "Clicky"}))
+            assert "first name" in frame.fields[0].name, "Unexpected field: " + frame.fields[0].name
+            await gptscript.prompt(PromptResponse(frame.id, {frame.fields[0].name: "Clicky"}))

     tool = ToolDef(
         tools=["sys.prompt"],
@@ -727,7 +731,7 @@ async def test_run_file_with_metadata(gptscript):

 @pytest.mark.asyncio
 async def test_parse_with_metadata_then_run(gptscript):
-    cwd = os.getcwd().removesuffix("tests")
+    cwd = os.getcwd().removesuffix("/tests")
     tools = await gptscript.parse(cwd + "/tests/fixtures/parse-with-metadata.gpt")
     run = gptscript.evaluate(tools[0])
     assert "200" == await run.text(), "Expect file to have correct output"
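A small fix riding along in these tests is the fixture-path suffix: str.removesuffix only strips an exact trailing match, so trimming "tests" instead of "/tests" left a trailing slash behind and the appended fixture path gained a doubled slash. A quick illustration of the difference; the checkout path here is hypothetical.

# str.removesuffix behaviour behind the "tests" -> "/tests" change; the path is made up.
cwd = "/home/user/py-gptscript/tests"

print(cwd.removesuffix("tests") + "/tests/fixtures/global-tools.gpt")
# -> /home/user/py-gptscript//tests/fixtures/global-tools.gpt  (old suffix: doubled slash)

print(cwd.removesuffix("/tests") + "/tests/fixtures/global-tools.gpt")
# -> /home/user/py-gptscript/tests/fixtures/global-tools.gpt   (new suffix: clean path)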
