FunguyBot/plugins/ai.py

"""
This plugin provides commands to interact with different AI models.
"""
import logging
import requests
import json
import simplematrixbotlib as botlib
import re
import markdown2
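
# The commands this plugin answers to are the top-level keys of plugins/ai.json.
# The structure below is illustrative only, inferred from the keys read in
# handle_ai_command(); the real file may differ or carry extra fields:
#
# {
#     "ask": {
#         "prompt": "You are a helpful assistant. ",
#         "summary": "Answer",
#         "temperature": 0.7,
#         "top_p": 0.9,
#         "top_k": 40,
#         "repetition_penalty": 1.1
#     }
# }
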
async def handle_command(room, message, bot, prefix, config):
    """
    Dispatch a prefixed Matrix message to the matching AI command.

    Args:
        room (Room): The Matrix room where the command was invoked.
        message (RoomMessage): The message object containing the command.
        bot (Bot): The bot object.
        prefix (str): The command prefix.
        config (dict): Configuration parameters.

    Returns:
        None
    """
    match = botlib.MessageMatch(room, message, bot, prefix)
    if match.is_not_from_this_bot() and match.prefix():
        logging.info(f"Received command: {match.command()}")
        command = match.command()
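        # Reload ai.json on every command so edits to it take effect without
        # restarting the bot; any top-level key in the file acts as a command name.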
        conf = load_config()
        if command in conf:
            await handle_ai_command(room, bot, command, match.args(), conf)


async def handle_ai_command(room, bot, command, args, config):
    """
    Send a prompt to the configured AI backend and post the reply to the room.

    Args:
        room (Room): The Matrix room where the command was invoked.
        bot (Bot): The bot object.
        command (str): The name of the AI model command.
        args (list): List of arguments provided with the command.
        config (dict): Configuration parameters.

    Returns:
        None
    """
    if len(args) < 1:
        await bot.api.send_text_message(room.room_id, f"Usage: !{command} [prompt]")
        logging.info("Sent usage message to the room")
        return

    prompt = ' '.join(args)

    # Prepare data for the API request
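    # Local completions endpoint. This looks like an OpenAI-compatible
    # /v1/completions API (for example, text-generation-webui's default on
    # port 5000); the exact backend serving it is an assumption.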
    url = "http://127.0.0.1:5000/v1/completions"
    headers = {
        "Content-Type": "application/json"
    }
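    # The prompt is wrapped in [INST] ... [/INST] tags (the Llama-2/Mistral
    # instruction template), with the per-command system prompt from ai.json
    # prepended to the user's text.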
    data = {
        "prompt": f"<s>[INST]{config[command]['prompt']}{prompt}[/INST]",
        "max_tokens": 1024,
        "temperature": config[command]["temperature"],
        "top_p": config[command]["top_p"],
        "top_k": config[command]["top_k"],
        "repetition_penalty": config[command]["repetition_penalty"],
        "seed": -1,
        "stream": False
    }

    # Make HTTP request to the API endpoint
    try:
        response = requests.post(url, headers=headers, json=data, verify=False)
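        # verify=False disables TLS certificate checks; it is a no-op for the
        # plain-HTTP localhost URL above, but would matter if this ever pointed
        # at an HTTPS endpoint.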
        response.raise_for_status()  # Raise HTTPError for bad responses
        payload = response.json()
        new_text = payload['choices'][0]['text']
        new_text = markdown_to_html(new_text)
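        # Long replies (more than one paragraph or list item) are collapsed
        # into an HTML <details> block so they do not flood the room, which
        # appears to be why the reply is converted to HTML above rather than
        # sent as raw Markdown.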
        if new_text.count('<p>') > 1 or new_text.count('<li>') > 1:
            new_text = f"<details><summary><strong>{config[command]['summary']}</strong></summary>{new_text}</details>"
        await bot.api.send_markdown_message(room.room_id, new_text)
        logging.info("Sent generated text to the room")
    except requests.exceptions.RequestException as e:
        logging.error(f"HTTP request failed for '{prompt}': {e}")
        await bot.api.send_text_message(room.room_id, f"Error generating text: {e}")


def markdown_to_html(markdown_text):
    """
    Convert Markdown text to HTML.

    Args:
        markdown_text (str): Markdown formatted text.

    Returns:
        str: HTML formatted text.
    """
    html_content = markdown2.markdown(markdown_text)
    return html_content


def load_config():
    """
    Load configuration from the plugins/ai.json file.

    Returns:
        dict: Configuration parameters.
    """
    with open("plugins/ai.json", "r") as f:
        config = json.load(f)
    return config


CONFIG = load_config()
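
# CONFIG is loaded once at import time; handle_command() reloads ai.json on
# each message rather than using this module-level copy, so loading here mainly
# surfaces a missing or malformed file early.
#
# Illustrative wiring only: an assumption about how the bot core hands messages
# to this plugin (the prefix "!" and the credentials below are made up):
#
#     bot = botlib.Bot(botlib.Creds("https://matrix.example.org", "funguy", "password"))
#
#     @bot.listener.on_message_event
#     async def on_message(room, message):
#         await handle_command(room, message, bot, "!", CONFIG)
#
#     bot.run()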