diff --git a/src/gptbot/classes/openai.py b/src/gptbot/classes/openai.py
index ba4a197..0264801 100644
--- a/src/gptbot/classes/openai.py
+++ b/src/gptbot/classes/openai.py
@@ -302,7 +302,7 @@ class OpenAI:
                 self.logger.log(f"No more responses received, aborting.")
                 result_text = False
             else:
-                messages = original_messages + [choice.message] + tool_responses
+                messages = original_messages[:-1] + [choice.message] + tool_responses + original_messages[-1:]
 
                 result_text, additional_tokens = await self.generate_chat_response(messages, user, room)
 
diff --git a/src/gptbot/tools/__init__.py b/src/gptbot/tools/__init__.py
index e6c44d0..8ade706 100644
--- a/src/gptbot/tools/__init__.py
+++ b/src/gptbot/tools/__init__.py
@@ -12,6 +12,7 @@ for tool in [
     "webrequest",
     "imagine",
     "imagedescription",
+    "wikipedia",
 ]:
     tool_class = getattr(import_module(
         "." + tool, "gptbot.tools"), tool.capitalize())
diff --git a/src/gptbot/tools/wikipedia.py b/src/gptbot/tools/wikipedia.py
new file mode 100644
index 0000000..ae4b266
--- /dev/null
+++ b/src/gptbot/tools/wikipedia.py
@@ -0,0 +1,67 @@
+from .base import BaseTool
+
+from urllib.parse import urlencode
+
+import aiohttp
+
+class Wikipedia(BaseTool):
+    DESCRIPTION = "Get information from Wikipedia."
+    PARAMETERS = {
+        "type": "object",
+        "properties": {
+            "query": {
+                "type": "string",
+                "description": "The query to search for.",
+            },
+            "language": {
+                "type": "string",
+                "description": "The language to search in.",
+                "default": "en",
+            },
+            "extract": {
+                "type": "boolean",
+                "description": "Whether to return only the introductory extract instead of the full article source.",
+                "default": False,
+            },
+        },
+        "required": ["query"],
+    }
+
+    async def run(self):
+        """Get information from Wikipedia."""
+        if not (query := self.kwargs.get("query")):
+            raise Exception('No query provided.')
+
+        language = self.kwargs.get("language", "en")
+        extract = self.kwargs.get("extract", False)
+
+        args = {
+            "action": "query",
+            "format": "json",
+            "titles": query,
+        }
+
+        if extract:
+            args["prop"] = "extracts"
+            args["exintro"] = ""
+
+        else:
+            args["prop"] = "revisions"
+            args["rvprop"] = "content"
+
+        url = f'https://{language}.wikipedia.org/w/api.php?{urlencode(args)}'
+
+        async with aiohttp.ClientSession() as session:
+            async with session.get(url) as response:
+                if response.status == 200:
+                    data = await response.json()
+                    pages = data['query']['pages']
+                    page = list(pages.values())[0]
+                    if 'extract' in page:
+                        return f"**{page['title']} (Extract)**\n{page['extract']}"
+                    elif 'revisions' in page:
+                        return f"**{page['title']}**\n{page['revisions'][0]['*']}"
+                    else:
+                        raise Exception('No results found.')
+                else:
+                    raise Exception(f'Could not connect to Wikipedia API: {response.status} {response.reason}')