From 9c2c4d5f6f463d41baddbeb2cf9863a476755639 Mon Sep 17 00:00:00 2001
From: Justin
Date: Fri, 19 May 2023 11:26:07 -0500
Subject: [PATCH] Run OpenAI calls async

---
 classes/bot.py       | 15 +++++++++++++--
 commands/classify.py | 21 +++++++++++++++++++--
 commands/imagine.py  | 21 +++++++++++++++++++--
 3 files changed, 51 insertions(+), 6 deletions(-)

diff --git a/classes/bot.py b/classes/bot.py
index bbde9b2..149f930 100644
--- a/classes/bot.py
+++ b/classes/bot.py
@@ -2,6 +2,7 @@
 import markdown2
 import duckdb
 import tiktoken
 import asyncio
+import functools
 
 from PIL import Image
@@ -780,8 +781,18 @@ class GPTBot:
             chat_messages, self.max_tokens - 1, system_message=system_message)
 
         try:
-            response, tokens_used = self.chat_api.generate_chat_response(
-                chat_messages, user=room.room_id)
+            loop = asyncio.get_event_loop()
+        except Exception as e:
+            self.logger.log(f"Error getting event loop: {e}", "error")
+            await self.send_message(
+                room, "Something went wrong. Please try again.", True)
+            return
+
+        try:
+            chat_partial = functools.partial(self.chat_api.generate_chat_response, truncated_messages, user=room.room_id)
+            response, tokens_used = await loop.run_in_executor(None, chat_partial)
+            # response, tokens_used = self.chat_api.generate_chat_response(
+            #     chat_messages, user=room.room_id)
         except Exception as e:
             self.logger.log(f"Error generating response: {e}", "error")
             await self.send_message(
diff --git a/commands/classify.py b/commands/classify.py
index e5813ff..fba23a7 100644
--- a/commands/classify.py
+++ b/commands/classify.py
@@ -1,3 +1,6 @@
+import asyncio
+import functools
+
 from nio.events.room_events import RoomMessageText
 from nio.rooms import MatrixRoom
 
@@ -8,7 +11,21 @@ async def command_classify(room: MatrixRoom, event: RoomMessageText, bot):
     if prompt:
         bot.logger.log("Classifying message...")
 
-        response, tokens_used = bot.classification_api.classify_message(prompt, user=room.room_id)
+        try:
+            loop = asyncio.get_event_loop()
+        except Exception as e:
+            bot.logger.log(f"Error getting event loop: {e}", "error")
+            await bot.send_message(
+                room, "Something went wrong. Please try again.", True)
+            return
+
+        try:
+            classify_partial = functools.partial(bot.classification_api.classify_message, prompt, user=room.room_id)
+            response, tokens_used = await loop.run_in_executor(None, classify_partial)
+        except Exception as e:
+            bot.logger.log(f"Error classifying message: {e}", "error")
+            await bot.send_message(room, "Sorry, I couldn't classify the message. Please try again later.", True)
+            return
 
         message = f"The message you provided seems to be of type: {response['type']}."
 
@@ -21,4 +38,4 @@ async def command_classify(room: MatrixRoom, event: RoomMessageText, bot):
 
         return
 
-    await bot.send_message(room, "You need to provide a prompt.", True)
\ No newline at end of file
+    await bot.send_message(room, "You need to provide a prompt.", True)
diff --git a/commands/imagine.py b/commands/imagine.py
index 54e6c71..90b7930 100644
--- a/commands/imagine.py
+++ b/commands/imagine.py
@@ -1,3 +1,6 @@
+import asyncio
+import functools
+
 from nio.events.room_events import RoomMessageText
 from nio.rooms import MatrixRoom
 
@@ -8,7 +11,21 @@ async def command_imagine(room: MatrixRoom, event: RoomMessageText, bot):
     if prompt:
         bot.logger.log("Generating image...")
 
-        images, tokens_used = bot.image_api.generate_image(prompt, user=room.room_id)
+        try:
+            loop = asyncio.get_event_loop()
+        except Exception as e:
+            bot.logger.log(f"Error getting event loop: {e}", "error")
+            await bot.send_message(
+                room, "Something went wrong. Please try again.", True)
+            return
+
+        try:
+            image_partial = functools.partial(bot.image_api.generate_image, prompt, user=room.room_id)
+            images, tokens_used = await loop.run_in_executor(None, image_partial)
+        except Exception as e:
+            bot.logger.log(f"Error generating image: {e}", "error")
+            await bot.send_message(room, "Sorry, I couldn't generate an image. Please try again later.", True)
+            return
 
         for image in images:
             bot.logger.log(f"Sending image...")
@@ -18,4 +35,4 @@
 
         return
 
-    await bot.send_message(room, "You need to provide a prompt.", True)
\ No newline at end of file
+    await bot.send_message(room, "You need to provide a prompt.", True)
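
For reference, the offloading pattern this patch applies in every hunk is roughly the sketch below. It is illustrative only: slow_call and the example Matrix user ID are hypothetical stand-ins for the blocking OpenAI client methods and the real room/user values used in the bot.

    import asyncio
    import functools
    import time


    def slow_call(prompt, user=None):
        # Stand-in for a blocking network request, e.g. an OpenAI API call.
        time.sleep(1)
        return f"response to {prompt!r} for {user}"


    async def main():
        # functools.partial bundles positional and keyword arguments so the
        # blocking call can be handed to the default thread-pool executor,
        # which keeps the asyncio event loop responsive while the work runs
        # in a worker thread.
        loop = asyncio.get_event_loop()
        call = functools.partial(slow_call, "hello", user="@alice:example.org")
        result = await loop.run_in_executor(None, call)
        print(result)


    asyncio.run(main())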