Compare commits

..

No commits in common. "14ad558ac6294c2062042fb632d4c167d97270bf" and "2c838d5456497466a97b8899643fa38725ce6dd8" have entirely different histories.

4 changed files with 31 additions and 35 deletions

View file

@@ -5,4 +5,3 @@ model: "dalle"
reply_in_thread: True
redis_host: "192.168.1.242"
redis_port: "6379"
haste_url: "https://haste.snrd.eu/documents"

View file

@@ -3,8 +3,8 @@ import urllib
import io
import base64
import asyncio
import requests
import re
import aiohttp
from PIL import Image
from typing import Type, Deque, Dict, Generator
@@ -26,7 +26,6 @@ class Config(BaseProxyConfig):
helper.copy("nickname")
helper.copy("redis_host")
helper.copy("redis_port")
helper.copy("haste_url")
class Gpt(Plugin):
@@ -38,7 +37,7 @@ class Gpt(Plugin):
self.config.load_and_update()
self.name = self.config["name"]
self.nickname = self.config["nickname"]
self.history = History(self.config["redis_host"], self.config["redis_port"], self.config["haste_url"])
self.history = History(self.config["redis_host"], self.config["redis_port"])
@classmethod
def get_config_class(cls) -> Type[BaseProxyConfig]:
@@ -93,6 +92,7 @@ class Gpt(Plugin):
# Call the GPT API to get picture
await self.client.set_typing(event.room_id, timeout=99999)
messages = await self.history.get(event)
print(messages)
response = await self._call_gpt(event.content["body"], messages)
@@ -118,26 +118,24 @@
"prompt": prompt
}
async with aiohttp.request("POST", "https://nexra.aryahcr.cc/api/chat/gpt", headers=headers, data=json.dumps(data)) as response:
if response.status != 200:
self.log.warning(f"Unexpected status sending request to nexra.aryahcr.cc: {response.status_code}")
return
response = requests.post("https://nexra.aryahcr.cc/api/chat/gpt", headers=headers, data=json.dumps(data))
count = -1
response_text = await response.text()
for i in range(len(response_text)):
if count <= -1:
if response_text[i] == "{":
count = i
else:
break
if response.status_code != 200:
self.log.warning(f"Unexpected status sending request to nexra.aryahcr.cc: {response.status_code}")
return
try:
response_json = await response.json(content_type=None)
content = response_json['gpt']
return content
except e:
return e
count = -1
for i in range(len(response.text)):
if count <= -1:
if response.text[i] == "{":
count = i
else:
break
response_json = json.loads(response.text[count:])
content = response_json['gpt']
return content
@command.new(name="export")
async def export(self, event: MessageEvent) -> None:

View file

@@ -1,14 +1,12 @@
import redis.asyncio as redis
import aiohttp
import requests
import json
class History():
r: redis.Redis()
haste_url: ""
def __init__(self, redis_host, redis_port, haste_url):
def __init__(self, redis_host, redis_port):
self.r = redis.Redis(host=redis_host, port=redis_port, db=0)
self.haste_url = haste_url
def createCacheKey(self, id):
return f"gpt-history-user-{id}"
@@ -46,10 +44,11 @@ class History():
for i in range(0, len(history), 2):
text += f"You: {history[i]['content']}\nGPT: {history[i+1]['content']}\n\n"
async with aiohttp.request("POST", self.haste_url, data=text) as response:
if response.status != 200:
return "Could not export the ChatGPT history! Please try again later!"
else:
haste = await response.json()
msg = "Your ChatGPT history: https://haste.snrd.eu/raw/{}".format(haste["key"])
return msg
response = requests.post("https://haste.snrd.eu/documents", data=text)
if response.status_code != 200:
return "Could not export the ChatGPT history! Please try again later!"
else:
haste = json.loads(response.text)['key']
msg = "Your ChatGPT history: https://haste.snrd.eu/raw/{}".format(haste)
return msg

View file

@@ -1,6 +1,6 @@
maubot: 0.1.0
id: sh.boehm.gpt
version: 0.0.04
version: 0.0.02
license: MIT
modules:
- gpt