From ca7245696ae8222808c6b3b5dd7d2a18dba4086c Mon Sep 17 00:00:00 2001
From: Kumi
Date: Sun, 4 Aug 2024 17:42:23 +0200
Subject: [PATCH] fix: correct max_tokens reference in OpenAI class

Updated the reference to max_tokens in the truncation call from
self.chat_api.max_tokens to self.max_tokens, ensuring the correct token
limit is applied. This resolves potential issues with message length
handling.
---
 src/gptbot/classes/ai/openai.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/gptbot/classes/ai/openai.py b/src/gptbot/classes/ai/openai.py
index 3239879..d3b5b71 100644
--- a/src/gptbot/classes/ai/openai.py
+++ b/src/gptbot/classes/ai/openai.py
@@ -393,7 +393,7 @@ class OpenAI(BaseAI):
         # Truncate messages to fit within the token limit
         self._truncate(
             messages=chat_messages,
-            max_tokens=self.chat_api.max_tokens - 1,
+            max_tokens=self.max_tokens - 1,
             system_message=system_message,
         )