LLM plugin added

Hash Borgir 2024-02-12 20:19:03 -07:00
parent 18b5e37fd9
commit 2b7e7db27b

plugins/llm.py (new file, 59 lines)

@@ -0,0 +1,59 @@
# plugins/llm.py
import logging
import requests
import json
import simplematrixbotlib as botlib
async def handle_command(room, message, bot, PREFIX):
    """
    Handle the !gpt command.

    Args:
        room (Room): The Matrix room where the command was invoked.
        message (RoomMessage): The message object containing the command.
        bot (Bot): The bot object.
        PREFIX (str): The command prefix.

    Returns:
        None
    """
    match = botlib.MessageMatch(room, message, bot, PREFIX)
    if match.is_not_from_this_bot() and match.prefix() and match.command("gpt"):
        logging.info("Received !gpt command")
        args = match.args()
        if len(args) < 1:
            await bot.api.send_text_message(room.room_id, "Usage: !gpt <prompt>")
            logging.info("Sent usage message to the room")
            return
        prompt = ' '.join(args)

        # Prepare data for the API request to the local completions endpoint.
        url = "http://127.0.0.1:5000/v1/completions"
        headers = {
            "Content-Type": "application/json"
        }
        # Sampling parameters; seed=-1 lets the backend pick a random seed,
        # stream=False returns the whole completion in a single response.
        data = {
            "prompt": prompt,
            "max_tokens": 4096,
            "temperature": 2,
            "top_p": 0.14,
            "top_k": 49,
            "seed": -1,
            "stream": False,
            "repetition_penalty": 1.17
        }

        # Make HTTP request to the API endpoint. Note that requests is
        # synchronous, so the event loop is blocked while the model generates.
        try:
            response = requests.post(url, headers=headers, json=data, verify=False)
            response.raise_for_status()  # Raise HTTPError for bad responses
            payload = response.json()
            # Expected OpenAI-style response shape: {"choices": [{"text": ...}]}
            new_text = payload['choices'][0]['text']
            await bot.api.send_markdown_message(room.room_id, new_text)
            logging.info("Sent generated text to the room")
        except requests.exceptions.RequestException as e:
            logging.error(f"HTTP request failed for '{prompt}': {e}")
            await bot.api.send_text_message(room.room_id, f"Error generating text: {e}")
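
For context, below is a minimal sketch of how this plugin could be wired into the bot's entry point. The file name (main.py), homeserver URL, and credentials are placeholders and not part of this commit, and the import path assumes plugins/ is importable as a package; only simplematrixbotlib's standard Creds, Bot, and listener APIs are used.

# main.py (hypothetical wiring; not part of this commit)
import simplematrixbotlib as botlib
from plugins import llm

PREFIX = "!"

# Placeholder homeserver and credentials -- replace with real values.
creds = botlib.Creds("https://matrix.example.org", "botname", "password")
bot = botlib.Bot(creds)

@bot.listener.on_message_event
async def on_message(room, message):
    # Forward every incoming room message to the plugin's command handler;
    # the plugin itself checks the prefix and the "gpt" command name.
    await llm.handle_command(room, message, bot, PREFIX)

bot.run()

With this wiring, a user in a joined room sends "!gpt <prompt>" and the bot replies with the completion returned by the local endpoint at 127.0.0.1:5000.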