AI plugins updated, now html formatted
This commit is contained in:
parent
eb81f7aa67
commit
5962eb53ad
@@ -8,6 +8,7 @@ import requests
 import json
 import simplematrixbotlib as botlib
 import re
+import markdown2
 
 async def handle_command(room, message, bot, prefix, config):
     """
@@ -56,9 +57,10 @@ async def handle_command(room, message, bot, prefix, config):
         response.raise_for_status()  # Raise HTTPError for bad responses
         payload = response.json()
         new_text = payload['choices'][0]['text']
-        if new_text.count('\n') > 1:  # Check if new_text has more than one paragraph
-            new_text = new_text.replace("\n", '<br>')
-            new_text = re.sub(r"\*\*(.*?)\*\*", r"<strong>\1</strong>", new_text)
+        new_text = markdown_to_html(new_text)
+        if new_text.count('<p>') > 2:  # Check if new_text has more than one paragraph
+            #new_text = new_text.replace("\n", '<br>')
+            #new_text = re.sub(r"\*\*(.*?)\*\*", r"<strong>\1</strong>", new_text)
             new_text = "<details><summary><strong>🎵Funguy Music GPT🎵<br>⤵︎Click Here To See Funguy's Response⤵︎</strong></summary>" + new_text + "</details>"
             await bot.api.send_markdown_message(room.room_id, new_text)
         else:
@@ -67,3 +69,8 @@ async def handle_command(room, message, bot, prefix, config):
     except requests.exceptions.RequestException as e:
         logging.error(f"HTTP request failed for '{prompt}': {e}")
         await bot.api.send_text_message(room.room_id, f"Error generating text: {e}")
+
+
+def markdown_to_html(markdown_text):
+    html_content = markdown2.markdown(markdown_text)
+    return html_content
@@ -9,6 +9,7 @@ import requests
 import json
 import simplematrixbotlib as botlib
 import re
+import markdown2
 
 async def handle_command(room, message, bot, prefix, config):
     """
@@ -57,9 +58,10 @@ async def handle_command(room, message, bot, prefix, config):
         response.raise_for_status()  # Raise HTTPError for bad responses
         payload = response.json()
         new_text = payload['choices'][0]['text']
-        if new_text.count('\n') > 2:  # Check if new_text has more than one paragraph
-            new_text = new_text.replace("\n", '<br>')
-            new_text = re.sub(r"\*\*(.*?)\*\*", r"<strong>\1</strong>", new_text)
+        new_text = markdown_to_html(new_text)
+        if new_text.count('<p>') > 2:  # Check if new_text has more than one paragraph
+            #new_text = new_text.replace("\n", '<br>')
+            #new_text = re.sub(r"\*\*(.*?)\*\*", r"<strong>\1</strong>", new_text)
             new_text = "<details><summary><strong>🍄Funguy Tech GPT🍄<br>⤵︎Click Here To See Funguy's Response⤵︎</strong></summary>" + new_text + "</details>"
             await bot.api.send_markdown_message(room.room_id, new_text)
         else:
@@ -68,3 +70,7 @@ async def handle_command(room, message, bot, prefix, config):
     except requests.exceptions.RequestException as e:
         logging.error(f"HTTP request failed for '{prompt}': {e}")
         await bot.api.send_text_message(room.room_id, f"Error generating text: {e}")
+
+def markdown_to_html(markdown_text):
+    html_content = markdown2.markdown(markdown_text)
+    return html_content
@@ -3,3 +3,4 @@ requests
 pytube
 duckduckgo_search
 nio
+markdown2
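
Taken together, the hunks above route each model reply through markdown2 and, when the converted reply spans more than two <p> blocks, wrap it in a click-to-expand <details> element. Below is a minimal standalone sketch of that path, assuming only that markdown2 is installed; the sample reply text and the summary label are placeholders, not the plugins' actual values.

import markdown2

def markdown_to_html(markdown_text):
    # Convert the model's markdown output to HTML for the formatted Matrix message
    return markdown2.markdown(markdown_text)

# Placeholder reply; in the plugins this comes from payload['choices'][0]['text']
new_text = "**Hello!**\n\nFirst paragraph.\n\nSecond paragraph.\n\nThird paragraph."
new_text = markdown_to_html(new_text)

# Replies longer than two <p> blocks are collapsed behind a click-to-expand summary
if new_text.count('<p>') > 2:
    new_text = ("<details><summary><strong>Funguy GPT response</strong></summary>"
                + new_text + "</details>")

print(new_text)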