Skip to content

Commit

Permalink
fixed-no-codeblocks-error
Browse files Browse the repository at this point in the history
Signed-off-by: devjpt23 <[email protected]>
  • Loading branch information
devjpt23 committed Oct 31, 2024
1 parent f2f6125 commit 4857944
Show file tree
Hide file tree
Showing 3 changed files with 71 additions and 14 deletions.
21 changes: 16 additions & 5 deletions kai/kai_trace.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,24 +129,35 @@ def prompt(self, current_batch_count: int, prompt: str, pb_vars: dict):

@enabled_check
def llm_result(
    self,
    current_batch_count: int,
    retry_count: int,
    result: BaseMessage,
    output_filename: str,
):
    """Persist an LLM result to the trace directory.

    Writes the stringified `result` to
    `<trace_dir>/<current_batch_count>/<retry_count>/<output_filename>`,
    creating intermediate directories as needed.

    Args:
        current_batch_count: Batch index; becomes a path component.
        retry_count: Retry attempt index; becomes a path component.
        result: The LLM response to record (presumably a langchain
            BaseMessage — str() is used so any object with a usable
            __str__ also works).
        output_filename: Name of the trace file, allowing callers to
            distinguish variants (e.g. results with vs. without
            code blocks).
    """
    result_file_path = os.path.join(
        self.trace_dir,
        f"{current_batch_count}",
        f"{retry_count}",
        f"{output_filename}",
    )
    # Trace subdirectories may not exist yet for a new batch/retry.
    os.makedirs(os.path.dirname(result_file_path), exist_ok=True)
    with open(result_file_path, "w") as f:
        f.write(str(result))

@enabled_check
def response_metadata(
self, current_batch_count: int, retry_count: int, response_metadata: dict
self,
current_batch_count: int,
retry_count: int,
response_metadata: dict,
output_filename: str,
):
response_metadata_file_path = os.path.join(
self.trace_dir,
f"{current_batch_count}",
f"{retry_count}",
"response_metadata.json",
f"{output_filename}",
)
os.makedirs(os.path.dirname(response_metadata_file_path), exist_ok=True)
with open(response_metadata_file_path, "w") as f:
Expand Down
63 changes: 54 additions & 9 deletions kai/service/kai_application/kai_application.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,27 +238,72 @@ def get_incident_solutions_for_file(
application_name,
f'{file_name.replace("/", "-")}',
):
llm_result = self.model_provider.llm.invoke(prompt)
trace.llm_result(count, retry_attempt_count, llm_result)
llm_request = [("human", prompt)]
llm_result = self.model_provider.llm.invoke(llm_request)
content = parse_file_solution_content(
src_file_language, str(llm_result.content)
)

# The LLM response must include code blocks (formatted within triple backticks) to be considered complete. Usually the LLM responds with code blocks, but occasionally it fails to do so (see issue #350: https://github.com/konveyor/kai/issues/350). Complete responses are saved directly in the trace directory; for incomplete responses, an additional prompt is sent to the LLM, and the resulting complete response (with code blocks) is saved in the trace directory as a new file.

if len(content.updated_file) == 0:

trace.llm_result(
count,
retry_attempt_count,
llm_result.content,
"llm_result_without_codeblocks",
)
trace.response_metadata(
count,
retry_attempt_count,
llm_result.response_metadata,
"response_metadata_without_codeblocks.json",
)
self.has_tokens_exceeded(
llm_result.response_metadata,
estimated_prompt_tokens,
file_name,
)
llm_request.append(
(
"human",
"I request you to generate a complete response.",
)
)
llm_result = self.model_provider.llm.invoke(llm_request)
content = parse_file_solution_content(
src_file_language, str(llm_result.content)
)
if not content.updated_file:
raise Exception(
"The LLM could not provide a response with complete codeblocks."
)

trace.llm_result(
count,
retry_attempt_count,
llm_result.content,
"llm_result_with_codeblocks",
)
trace.response_metadata(
count,
retry_attempt_count,
llm_result.response_metadata,
"response_metadata_with_codeblocks.json",
)
trace.estimated_tokens(
count,
retry_attempt_count,
estimated_prompt_tokens,
self.tiktoken_encoding_base,
)
trace.response_metadata(
count, retry_attempt_count, llm_result.response_metadata
)
self.has_tokens_exceeded(
llm_result.response_metadata,
estimated_prompt_tokens,
file_name,
)

content = parse_file_solution_content(
src_file_language, str(llm_result.content)
)

if not content.updated_file:
raise Exception(
f"Error in LLM Response: The LLM did not provide an updated file for {file_name}"
Expand Down
1 change: 1 addition & 0 deletions kai/service/kai_application/test_kai_application.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

import aiohttp.web as web

from kai.models.file_solution import parse_file_solution_content

Check failure on line 6 in kai/service/kai_application/test_kai_application.py

View workflow job for this annotation

GitHub Actions / Trunk Check

ruff(F401)

[new] `kai.models.file_solution.parse_file_solution_content` imported but unused
from kai.models.kai_config import KaiConfig
from kai.models.report_types import ExtendedIncident
from kai.service.incident_store.sql_types import SQLIncident
Expand Down

0 comments on commit 4857944

Please sign in to comment.