
Commit

fixed-no-codeblocks-error
Signed-off-by: devjpt23 <[email protected]>
devjpt23 committed Oct 24, 2024
1 parent 3743e0a commit 2d15b9b
Showing 2 changed files with 54 additions and 7 deletions.
30 changes: 29 additions & 1 deletion kai/kai_trace.py
@@ -136,7 +136,21 @@ def llm_result(
         )
         os.makedirs(os.path.dirname(result_file_path), exist_ok=True)
         with open(result_file_path, "w") as f:
-            f.write(result.pretty_repr())
+            f.write(str(result))
+
+    @enabled_check
+    def llm_result_without_codeblocks(
+        self, current_batch_count: int, retry_count: int, result: BaseMessage
+    ):
+        result_file_path = os.path.join(
+            self.trace_dir,
+            f"{current_batch_count}",
+            f"{retry_count}",
+            "llm_result_without_codeblocks",
+        )
+        os.makedirs(os.path.dirname(result_file_path), exist_ok=True)
+        with open(result_file_path, "w") as f:
+            f.write(str(result))
 
     @enabled_check
     def response_metadata(
@@ -152,6 +166,20 @@ def response_metadata(
         with open(response_metadata_file_path, "w") as f:
             f.write(json.dumps(response_metadata, indent=4, default=str))
 
+    @enabled_check
+    def response_metadata_without_codeblocks(
+        self, current_batch_count: int, retry_count: int, response_metadata: dict
+    ):
+        response_metadata_file_path = os.path.join(
+            self.trace_dir,
+            f"{current_batch_count}",
+            f"{retry_count}",
+            "response_metadata_without_codeblocks.json",
+        )
+        os.makedirs(os.path.dirname(response_metadata_file_path), exist_ok=True)
+        with open(response_metadata_file_path, "w") as f:
+            f.write(json.dumps(response_metadata, indent=4, default=str))
+
     @enabled_check
     def exception(
         self,
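A minimal usage sketch of the two trace hooks added above (illustrative only: it assumes an enabled KaiTrace instance named trace, uses langchain_core's AIMessage as the BaseMessage, and picks arbitrary batch and retry indices):

    from langchain_core.messages import AIMessage

    # Hypothetical setup: `trace` is an enabled KaiTrace instance.
    result = AIMessage(content="an answer with no fenced code blocks")

    # Writes <trace_dir>/0/0/llm_result_without_codeblocks ...
    trace.llm_result_without_codeblocks(0, 0, result)
    # ... and <trace_dir>/0/0/response_metadata_without_codeblocks.json
    trace.response_metadata_without_codeblocks(0, 0, result.response_metadata)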
31 changes: 25 additions & 6 deletions kai/service/kai_application/kai_application.py
@@ -180,16 +180,35 @@ def get_incident_solutions_for_file(
                 application_name,
                 f'{file_name.replace("/", "-")}',
             ):
-                llm_result = self.model_provider.llm.invoke(prompt)
-                trace.llm_result(count, retry_attempt_count, llm_result)
-                trace.response_metadata(
-                    count, retry_attempt_count, llm_result.response_metadata
-                )
-
+                llm_request = [("human", prompt)]
+                llm_result = self.model_provider.llm.invoke(llm_request)
                 content = parse_file_solution_content(
                     src_file_language, str(llm_result.content)
                 )
+
+                if len(content.updated_file) == 0:
+                    trace.llm_result_without_codeblocks(
+                        count, retry_attempt_count, llm_result.content
+                    )
+                    trace.response_metadata_without_codeblocks(
+                        count, retry_attempt_count, llm_result.response_metadata
+                    )
+                    llm_request.append(
+                        (
+                            "human",
+                            "I request you to generate a complete response.",
+                        )
+                    )
+                    llm_result = self.model_provider.llm.invoke(llm_request)
+                    content = parse_file_solution_content(
+                        src_file_language, str(llm_result.content)
+                    )
+
+                trace.llm_result(count, retry_attempt_count, llm_result.content)
+                trace.response_metadata(
+                    count, retry_attempt_count, llm_result.response_metadata
+                )
 
                 if not content.updated_file:
                     raise Exception(
                         f"Error in LLM Response: The LLM did not provide an updated file for {file_name}"
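For reference, the retry pattern this commit introduces, reduced to a self-contained sketch (the function names and the regex-based block extractor are illustrative stand-ins, not the kai APIs):

    import re
    from typing import Callable

    def extract_code_block(text: str) -> str:
        # Return the first fenced code block in text, or "" if none is present.
        match = re.search(r"```(?:\w+)?\n(.*?)```", text, re.DOTALL)
        return match.group(1) if match else ""

    def invoke_with_codeblock_retry(
        llm_invoke: Callable[[list[tuple[str, str]]], str], prompt: str
    ) -> str:
        # First attempt: send the prompt as a single human message.
        request = [("human", prompt)]
        reply = llm_invoke(request)
        code = extract_code_block(reply)
        if not code:
            # No code block came back: keep the original prompt in the
            # message list and ask once more for a complete response.
            request.append(("human", "I request you to generate a complete response."))
            reply = llm_invoke(request)
            code = extract_code_block(reply)
        return code

Note that, as in the diff, the follow-up request reuses the accumulated human messages; the model's incomplete reply itself is not re-sent.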
