Compare commits


2 commits

SHA1        Message                                                          Date
14ad558ac6  remove .mbp                                                      2024-04-07 22:22:50 +02:00
94d1039a9c  switch from request to aiohttp, make haste server configurable  2024-04-07 22:22:07 +02:00
4 changed files with 35 additions and 31 deletions

View file

@@ -5,3 +5,4 @@ model: "dalle"
 reply_in_thread: True
 redis_host: "192.168.1.242"
 redis_port: "6379"
+haste_url: "https://haste.snrd.eu/documents"
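
The new haste_url key is expected to point at the documents endpoint of a hastebin-style server: the export command POSTs the chat history there as plain text and reads the paste key out of the JSON reply (see the History change further down). A minimal sketch of that contract, assuming a standard hastebin API; upload_paste is a hypothetical helper, not part of the plugin:

import aiohttp

async def upload_paste(haste_url: str, text: str) -> str:
    # POST plain text to the configured documents endpoint; a hastebin-style
    # server answers with JSON such as {"key": "abcdef"}.
    async with aiohttp.request("POST", haste_url, data=text) as response:
        response.raise_for_status()
        payload = await response.json()
        return payload["key"]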

View file

@@ -3,8 +3,8 @@ import urllib
 import io
 import base64
 import asyncio
-import requests
 import re
+import aiohttp
 from PIL import Image
 from typing import Type, Deque, Dict, Generator
@@ -26,6 +26,7 @@ class Config(BaseProxyConfig):
         helper.copy("nickname")
         helper.copy("redis_host")
         helper.copy("redis_port")
+        helper.copy("haste_url")
 
 
 class Gpt(Plugin):
@@ -37,7 +38,7 @@ class Gpt(Plugin):
         self.config.load_and_update()
         self.name = self.config["name"]
         self.nickname = self.config["nickname"]
-        self.history = History(self.config["redis_host"], self.config["redis_port"])
+        self.history = History(self.config["redis_host"], self.config["redis_port"], self.config["haste_url"])
 
     @classmethod
     def get_config_class(cls) -> Type[BaseProxyConfig]:
@@ -92,7 +93,6 @@ class Gpt(Plugin):
         # Call the GPT API to get picture
         await self.client.set_typing(event.room_id, timeout=99999)
         messages = await self.history.get(event)
-        print(messages)
         response = await self._call_gpt(event.content["body"], messages)
@@ -118,24 +118,26 @@
             "prompt": prompt
         }
-        response = requests.post("https://nexra.aryahcr.cc/api/chat/gpt", headers=headers, data=json.dumps(data))
-
-        if response.status_code != 200:
-            self.log.warning(f"Unexpected status sending request to nexra.aryahcr.cc: {response.status_code}")
-            return
-
-        count = -1
-        for i in range(len(response.text)):
-            if count <= -1:
-                if response.text[i] == "{":
-                    count = i
-            else:
-                break
-
-        response_json = json.loads(response.text[count:])
-        content = response_json['gpt']
-        return content
+        async with aiohttp.request("POST", "https://nexra.aryahcr.cc/api/chat/gpt", headers=headers, data=json.dumps(data)) as response:
+            if response.status != 200:
+                self.log.warning(f"Unexpected status sending request to nexra.aryahcr.cc: {response.status_code}")
+                return
+
+            count = -1
+            response_text = await response.text()
+            for i in range(len(response_text)):
+                if count <= -1:
+                    if response_text[i] == "{":
+                        count = i
+                else:
+                    break
+
+            try:
+                response_json = await response.json(content_type=None)
+                content = response_json['gpt']
+                return content
+            except e:
+                return e
 
     @command.new(name="export")
     async def export(self, event: MessageEvent) -> None:
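
Two details of the new _call_gpt body are worth flagging: aiohttp's ClientResponse exposes .status but has no .status_code, so the f-string in the warning would raise AttributeError exactly when the status check fails, and the bare "except e:" never binds an exception (it needs "except Exception as e:"). The brace-scanning loop is also dead code now, since its result is unused once response.json() is called. Below is a minimal corrected sketch under the same assumptions about the nexra.aryahcr.cc response format; call_gpt is a hypothetical standalone helper, not the plugin method, and it keeps the prefix-stripping behaviour of the old requests code:

import json
from typing import Optional

import aiohttp

async def call_gpt(headers: dict, data: dict) -> Optional[str]:
    async with aiohttp.request(
        "POST",
        "https://nexra.aryahcr.cc/api/chat/gpt",
        headers=headers,
        data=json.dumps(data),
    ) as response:
        if response.status != 200:
            # aiohttp responses carry .status; .status_code does not exist here
            return None
        text = await response.text()
        # The API may prepend text before the JSON body, so drop everything
        # before the first "{" (this is what the pre-aiohttp code did).
        start = text.find("{")
        if start == -1:
            return None
        try:
            return json.loads(text[start:])["gpt"]
        except (json.JSONDecodeError, KeyError):
            return None

Note that aiohttp.request also opens and closes a throwaway ClientSession on every call; a long-lived session (maubot plugins already carry one as self.http) is the more common aiohttp pattern.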

View file

@@ -1,12 +1,14 @@
 import redis.asyncio as redis
-import requests
+import aiohttp
 import json
 
 class History():
     r: redis.Redis()
+    haste_url: ""
 
-    def __init__(self, redis_host, redis_port):
+    def __init__(self, redis_host, redis_port, haste_url):
         self.r = redis.Redis(host=redis_host, port=redis_port, db=0)
+        self.haste_url = haste_url
 
     def createCacheKey(self, id):
         return f"gpt-history-user-{id}"
@@ -44,11 +46,10 @@ class History():
         for i in range(0, len(history), 2):
             text += f"You: {history[i]['content']}\nGPT: {history[i+1]['content']}\n\n"
 
-        response = requests.post("https://haste.snrd.eu/documents", data=text)
-        if response.status_code != 200:
-            return "Could not export the ChatGPT history! Please try again later!"
-        else:
-            haste = json.loads(response.text)['key']
-            msg = "Your ChatGPT history: https://haste.snrd.eu/raw/{}".format(haste)
-            return msg
+        async with aiohttp.request("POST", self.haste_url, data=text) as response:
+            if response.status != 200:
+                return "Could not export the ChatGPT history! Please try again later!"
+            else:
+                haste = await response.json()
+                msg = "Your ChatGPT history: https://haste.snrd.eu/raw/{}".format(haste["key"])
+                return msg
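
One gap the export change leaves open: the POST now targets the configurable self.haste_url, but the link handed back to the user still hardcodes https://haste.snrd.eu/raw/. A small sketch of how the raw link could be derived from the configured documents URL instead; derive_raw_url is a hypothetical helper and assumes the usual hastebin layout where /documents and /raw/<key> share a base URL:

def derive_raw_url(haste_url: str, key: str) -> str:
    # ".../documents" -> ".../raw/<key>", assuming the standard hastebin layout
    base = haste_url.rsplit("/documents", 1)[0]
    return f"{base}/raw/{key}"

# e.g. derive_raw_url("https://haste.snrd.eu/documents", haste["key"])
#      -> "https://haste.snrd.eu/raw/<key>"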

View file

@@ -1,6 +1,6 @@
 maubot: 0.1.0
 id: sh.boehm.gpt
-version: 0.0.02
+version: 0.0.04
 license: MIT
 modules:
 - gpt