
Commit

Update logging
NeonDaniel committed Feb 19, 2025
1 parent c139a5d commit d21bbb3
Showing 1 changed file with 5 additions and 2 deletions.
neon_llm_core/chatbot.py: 5 additions & 2 deletions
@@ -149,6 +149,7 @@ def _get_llm_api_response(self, shout: str) -> Optional[LLMProposeResponse]:
         response_queue = f"{queue}.response.{uuid4().hex}"
 
         try:
+            # TODO This is logged 1x per persona
             LOG.info(f"Sending to {self.mq_queue_config.vhost}/{queue} for "
                      f"persona={self.persona}")
 
@@ -178,7 +179,8 @@ def _get_llm_api_opinion(self, prompt: str, options: dict) -> Optional[LLMDiscus
         response_queue = f"{queue}.response.{uuid4().hex}"
 
         try:
-            LOG.info(f"Sending to {self.mq_queue_config.vhost}/{queue}")
+            LOG.info(f"Sending to {self.mq_queue_config.vhost}/{queue} for "
+                     f"persona={self.persona}")
 
             request_data = LLMDiscussRequest(model=self.base_llm,
                                              persona=self.persona,
@@ -211,7 +213,8 @@ def _get_llm_api_choice(self, prompt: str,
         response_queue = f"{queue}.response.{uuid4().hex}"
 
         try:
-            LOG.debug(f"Sending to {self.mq_queue_config.vhost}/{queue}")
+            LOG.debug(f"Sending to {self.mq_queue_config.vhost}/{queue} for "
+                      f"persona={self.persona}")
 
             request_data = LLMVoteRequest(model=self.base_llm,
                                           persona=self.persona,

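For context, the sketch below illustrates the shared logging pattern this commit finishes rolling out across _get_llm_api_response, _get_llm_api_opinion, and _get_llm_api_choice. It is a minimal stand-alone illustration, not the project's code: the stdlib logger, the MQConfig dataclass, the ChatbotLoggingSketch class, and the _log_request() helper are assumptions made here for demonstration; only the message format, the mq_queue_config.vhost and persona attributes, and the choice of info vs. debug levels come from the diff above.

# Sketch only: persona-tagged request logging as standardized by this commit.
# LOG, MQConfig, and _log_request() are hypothetical stand-ins, not the
# module's real objects.
import logging
from dataclasses import dataclass
from uuid import uuid4

LOG = logging.getLogger(__name__)


@dataclass
class MQConfig:
    vhost: str = "/llm"  # assumed value for illustration


class ChatbotLoggingSketch:
    def __init__(self, persona: dict):
        self.mq_queue_config = MQConfig()
        self.persona = persona

    def _log_request(self, queue: str, level: int = logging.INFO) -> str:
        """Build a response queue name and emit the persona-tagged log line."""
        response_queue = f"{queue}.response.{uuid4().hex}"
        # After this commit, propose/discuss requests log at INFO and vote
        # requests at DEBUG, all with the same vhost/queue/persona format.
        LOG.log(level,
                f"Sending to {self.mq_queue_config.vhost}/{queue} for "
                f"persona={self.persona}")
        return response_queue


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    bot = ChatbotLoggingSketch(persona={"name": "assistant"})
    bot._log_request("my_llm_input")                    # as in _get_llm_api_response / _get_llm_api_opinion
    bot._log_request("my_llm_input", logging.DEBUG)     # as in _get_llm_api_choice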