feat(logging): enhance debug logging for message processing
Added debug logging to capture incoming, prepared, and truncated messages in the OpenAI class, and included logging for the last messages fetched in the bot class. These additions aid in the traceability and debugging of message flows and processing errors. Additionally, an option to log detailed error tracebacks in debug mode was implemented to facilitate better error analysis.
This commit is contained in:
parent
ca7245696a
commit
48decdc9e2
2 changed files with 13 additions and 1 deletion
|
@ -173,6 +173,8 @@ class OpenAI(BaseAI):
|
||||||
) -> List[Any]:
|
) -> List[Any]:
|
||||||
chat_messages = []
|
chat_messages = []
|
||||||
|
|
||||||
|
self.logger.log(f"Incoming messages: {messages}", "debug")
|
||||||
|
|
||||||
for message in messages:
|
for message in messages:
|
||||||
if isinstance(message, (RoomMessageNotice, RoomMessageText)):
|
if isinstance(message, (RoomMessageNotice, RoomMessageText)):
|
||||||
role = (
|
role = (
|
||||||
|
@ -390,6 +392,8 @@ class OpenAI(BaseAI):
|
||||||
)
|
)
|
||||||
chat_messages.append({"role": "system", "content": message_body})
|
chat_messages.append({"role": "system", "content": message_body})
|
||||||
|
|
||||||
|
self.logger.log(f"Prepared messages: {chat_messages}", "debug")
|
||||||
|
|
||||||
# Truncate messages to fit within the token limit
|
# Truncate messages to fit within the token limit
|
||||||
self._truncate(
|
self._truncate(
|
||||||
messages=chat_messages,
|
messages=chat_messages,
|
||||||
|
@ -450,6 +454,8 @@ class OpenAI(BaseAI):
|
||||||
total_tokens = len(system_message) + 1
|
total_tokens = len(system_message) + 1
|
||||||
truncated_messages = []
|
truncated_messages = []
|
||||||
|
|
||||||
|
self.logger.log(f"Messages: {messages}", "debug")
|
||||||
|
|
||||||
for message in [messages[0]] + list(reversed(messages[1:])):
|
for message in [messages[0]] + list(reversed(messages[1:])):
|
||||||
content = (
|
content = (
|
||||||
message["content"]
|
message["content"]
|
||||||
|
|
|
@ -1103,6 +1103,7 @@ class GPTBot:
|
||||||
last_messages = await self._last_n_messages(
|
last_messages = await self._last_n_messages(
|
||||||
room.room_id, self.chat_api.max_messages
|
room.room_id, self.chat_api.max_messages
|
||||||
)
|
)
|
||||||
|
self.logger.log(f"Last messages: {last_messages}", "debug")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.log(f"Error getting last messages: {e}", "error")
|
self.logger.log(f"Error getting last messages: {e}", "error")
|
||||||
await self.send_message(
|
await self.send_message(
|
||||||
|
@ -1113,7 +1114,7 @@ class GPTBot:
|
||||||
system_message = self.get_system_message(room)
|
system_message = self.get_system_message(room)
|
||||||
|
|
||||||
chat_messages = await self.chat_api.prepare_messages(
|
chat_messages = await self.chat_api.prepare_messages(
|
||||||
last_messages, system_message
|
event, last_messages, system_message
|
||||||
)
|
)
|
||||||
|
|
||||||
# Check for a model override
|
# Check for a model override
|
||||||
|
@ -1159,6 +1160,11 @@ class GPTBot:
|
||||||
room, "Something went wrong generating audio file.", True
|
room, "Something went wrong generating audio file.", True
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if self.debug:
|
||||||
|
await self.send_message(
|
||||||
|
room, f"Error: {e}\n\n```\n{traceback.format_exc()}\n```", True
|
||||||
|
)
|
||||||
|
|
||||||
await self.send_message(room, response)
|
await self.send_message(room, response)
|
||||||
|
|
||||||
await self.matrix_client.room_typing(room.room_id, False)
|
await self.matrix_client.room_typing(room.room_id, False)
|
||||||
|
|
Loading…
Reference in a new issue