Optimize message concatenation, add Wikipedia tool
Refactor the message concatenation logic in the chat response so that the original final message stays at the end of the sequence. Introduce a new 'Wikipedia' tool that lets users query and retrieve information from Wikipedia directly through the bot's interface, making the bot more informative and interactive.
parent 3ee7505aa5
commit c92828def1
3 changed files with 64 additions and 1 deletion
@@ -302,7 +302,7 @@ class OpenAI:
                 self.logger.log(f"No more responses received, aborting.")
                 result_text = False
             else:
-                messages = original_messages + [choice.message] + tool_responses
+                messages = original_messages[:-1] + [choice.message] + tool_responses + original_messages[-1:]
 
                 result_text, additional_tokens = await self.generate_chat_response(messages, user, room)
 
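The fix is easiest to see in isolation. A minimal sketch of the reordering, with plain strings standing in for the real message dicts (all names here are illustrative):

# Minimal sketch of the reordering, using strings in place of message objects.
original_messages = ["system prompt", "history", "final user message"]
choice_message = "assistant tool call"
tool_responses = ["tool response"]

# Old behavior: the tool exchange was appended after the final user message.
old = original_messages + [choice_message] + tool_responses

# New behavior: the tool exchange is spliced in before it, so the
# original final message stays last in the sequence.
new = original_messages[:-1] + [choice_message] + tool_responses + original_messages[-1:]

assert old[-1] == "tool response"
assert new[-1] == "final user message"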
@@ -12,6 +12,7 @@ for tool in [
     "webrequest",
     "imagine",
     "imagedescription",
+    "wikipedia",
 ]:
     tool_class = getattr(import_module(
         "." + tool, "gptbot.tools"), tool.capitalize())
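The registration loop maps each module name to a class by capitalizing it, so the new entry resolves to the Wikipedia class in gptbot.tools.wikipedia. A sketch of the single iteration this commit adds (it assumes the gptbot package is importable):

from importlib import import_module

# One iteration of the loop for the new entry: a relative import of
# gptbot.tools.wikipedia, then a lookup of its "Wikipedia" attribute.
tool = "wikipedia"
tool_class = getattr(import_module("." + tool, "gptbot.tools"), tool.capitalize())
assert tool_class.__name__ == "Wikipedia"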
src/gptbot/tools/wikipedia.py (new file, 62 lines)
@@ -0,0 +1,62 @@
+from .base import BaseTool
+
+from urllib.parse import urlencode
+
+import aiohttp
+
+class Wikipedia(BaseTool):
+    DESCRIPTION = "Get information from Wikipedia."
+    PARAMETERS = {
+        "type": "object",
+        "properties": {
+            "query": {
+                "type": "string",
+                "description": "The query to search for.",
+            },
+            "language": {
+                "type": "string",
+                "description": "The language to search in.",
+                "default": "en",
+            },
+        },
+        "required": ["query"],
+    }
+
+    async def run(self):
+        """Get information from Wikipedia."""
+        if not (query := self.kwargs.get("query")):
+            raise Exception('No query provided.')
+
+        language = self.kwargs.get("language", "en")
+        extract = self.kwargs.get("extract", False)
+
+        args = {
+            "action": "query",
+            "format": "json",
+            "titles": query,
+        }
+
+        if extract:
+            args["prop"] = "extracts"
+            args["exintro"] = ""
+
+        else:
+            args["prop"] = "revisions"
+            args["rvprop"] = "content"
+
+        url = f'https://{language}.wikipedia.org/w/api.php?{urlencode(args)}'
+
+        async with aiohttp.ClientSession() as session:
+            async with session.get(url) as response:
+                if response.status == 200:
+                    data = await response.json()
+                    pages = data['query']['pages']
+                    page = list(pages.values())[0]
+                    if 'extract' in page:
+                        return f"**{page['title']} (Extract)**\n{page['extract']}"
+                    elif 'revisions' in page:
+                        return f"**{page['title']}**\n{page['revisions'][0]['*']}"
+                    else:
+                        raise Exception('No results found.')
+                else:
+                    raise Exception(f'Could not connect to Wikipedia API: {response.status} {response.reason}')
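Since run() builds its request with urlencode, the query URL can be reproduced without the bot. A sketch for an extract-mode lookup (the article title is an arbitrary example):

from urllib.parse import urlencode

# The same args dict run() builds when "extract" is truthy.
args = {
    "action": "query",
    "format": "json",
    "titles": "Alan Turing",  # arbitrary example title
    "prop": "extracts",
    "exintro": "",
}
print(f"https://en.wikipedia.org/w/api.php?{urlencode(args)}")
# https://en.wikipedia.org/w/api.php?action=query&format=json&titles=Alan+Turing&prop=extracts&exintro=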
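A hypothetical direct invocation of the tool, assuming BaseTool's constructor stores its keyword arguments in self.kwargs as run() expects (the constructor is not part of this diff). Note that "extract" is read by run() but not declared in PARAMETERS, so a model-issued tool call cannot set it through the schema:

import asyncio
from gptbot.tools.wikipedia import Wikipedia

async def main():
    # Hypothetical call: keyword arguments are assumed to land in self.kwargs.
    tool = Wikipedia(query="Alan Turing", extract=True)
    print(await tool.run())

asyncio.run(main())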