switch from requests to aiohttp, make haste server configurable

Philipp 2024-04-07 22:22:07 +02:00
parent 2c838d5456
commit 94d1039a9c
Signed by: Philipp
GPG key ID: 9EBD8439AFBAB750
5 changed files with 35 additions and 31 deletions

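The change described in the title replaces blocking `requests` calls with `aiohttp` inside the plugin's async handlers, so HTTP requests no longer block the maubot event loop. A minimal sketch of the pattern the commit adopts, using a placeholder URL and payload that are not taken from this repository:

```python
import asyncio
import json

import aiohttp


async def post_json(url: str, payload: dict) -> dict:
    # aiohttp.request() opens a one-off session; the response object is
    # only usable inside the async context manager.
    headers = {"Content-Type": "application/json"}
    async with aiohttp.request("POST", url, data=json.dumps(payload), headers=headers) as response:
        response.raise_for_status()
        # The body has to be awaited, unlike requests' response.text / response.json().
        return await response.json(content_type=None)


# Illustrative call against a placeholder endpoint:
# asyncio.run(post_json("https://example.org/api", {"prompt": "hi"}))
```

This is why the diffs below read bodies with `await response.text()` / `await response.json(...)` and check `response.status` rather than `response.status_code`.
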
View file

@@ -5,3 +5,4 @@ model: "dalle"
 reply_in_thread: True
 redis_host: "192.168.1.242"
 redis_port: "6379"
+haste_url: "https://haste.snrd.eu/documents"

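The new `haste_url` key follows maubot's usual config flow: declared in the plugin's base config, copied in `Config.do_update`, and read through `self.config` at start-up, as the next file shows. A short sketch of that flow, trimmed to the keys visible in this diff (the class skeleton is the standard maubot pattern, not copied verbatim from the plugin):

```python
from typing import Type

from maubot import Plugin
from mautrix.util.config import BaseProxyConfig, ConfigUpdateHelper


class Config(BaseProxyConfig):
    def do_update(self, helper: ConfigUpdateHelper) -> None:
        # Carry each known key over when the stored config is updated.
        helper.copy("redis_host")
        helper.copy("redis_port")
        helper.copy("haste_url")


class Gpt(Plugin):
    async def start(self) -> None:
        self.config.load_and_update()
        # Values land in self.config and can be handed to helpers.
        haste_url = self.config["haste_url"]

    @classmethod
    def get_config_class(cls) -> Type[BaseProxyConfig]:
        return Config
```
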
View file

@@ -3,8 +3,8 @@ import urllib
 import io
 import base64
 import asyncio
-import requests
 import re
+import aiohttp
 from PIL import Image
 from typing import Type, Deque, Dict, Generator
@@ -26,6 +26,7 @@ class Config(BaseProxyConfig):
         helper.copy("nickname")
         helper.copy("redis_host")
         helper.copy("redis_port")
+        helper.copy("haste_url")


 class Gpt(Plugin):
@@ -37,7 +38,7 @@ class Gpt(Plugin):
         self.config.load_and_update()
         self.name = self.config["name"]
         self.nickname = self.config["nickname"]
-        self.history = History(self.config["redis_host"], self.config["redis_port"])
+        self.history = History(self.config["redis_host"], self.config["redis_port"], self.config["haste_url"])

     @classmethod
     def get_config_class(cls) -> Type[BaseProxyConfig]:
@@ -92,7 +93,6 @@ class Gpt(Plugin):
         # Call the GPT API to get picture
         await self.client.set_typing(event.room_id, timeout=99999)
         messages = await self.history.get(event)
-        print(messages)

         response = await self._call_gpt(event.content["body"], messages)
@@ -118,24 +118,26 @@ class Gpt(Plugin):
             "prompt": prompt
         }

-        response = requests.post("https://nexra.aryahcr.cc/api/chat/gpt", headers=headers, data=json.dumps(data))
-
-        if response.status_code != 200:
-            self.log.warning(f"Unexpected status sending request to nexra.aryahcr.cc: {response.status_code}")
-            return
-
-        count = -1
-        for i in range(len(response.text)):
-            if count <= -1:
-                if response.text[i] == "{":
-                    count = i
-            else:
-                break
-
-        response_json = json.loads(response.text[count:])
-        content = response_json['gpt']
-        return content
+        async with aiohttp.request("POST", "https://nexra.aryahcr.cc/api/chat/gpt", headers=headers, data=json.dumps(data)) as response:
+            if response.status != 200:
+                self.log.warning(f"Unexpected status sending request to nexra.aryahcr.cc: {response.status}")
+                return
+
+            count = -1
+            response_text = await response.text()
+            for i in range(len(response_text)):
+                if count <= -1:
+                    if response_text[i] == "{":
+                        count = i
+                else:
+                    break
+            try:
+                response_json = await response.json(content_type=None)
+                content = response_json['gpt']
+                return content
+            except Exception as e:
+                return e

     @command.new(name="export")
     async def export(self, event: MessageEvent) -> None:

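The rewritten `_call_gpt` keeps the scan for the first `{`, which suggests the nexra endpoint can prefix its JSON payload with non-JSON characters, and then reads the `gpt` field from the parsed body. A hedged standalone sketch of that parsing step under the same assumption; the helper name and the sample body are illustrative only:

```python
import json


def extract_gpt_reply(body: str) -> str:
    # Assumes the response may carry a non-JSON prefix before the payload,
    # as the index-scanning loop in the diff implies.
    start = body.find("{")
    if start == -1:
        raise ValueError("no JSON object found in response body")
    return json.loads(body[start:])["gpt"]


# Illustrative prefixed body:
# extract_gpt_reply('_junk{"gpt": "hello"}') returns "hello"
```

`str.find` does the same job as the manual index loop, and parsing from that offset still works if the server returns clean JSON with no prefix.
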
View file

@@ -1,12 +1,14 @@
 import redis.asyncio as redis
-import requests
+import aiohttp
 import json

 class History():
     r: redis.Redis()
+    haste_url: ""

-    def __init__(self, redis_host, redis_port):
+    def __init__(self, redis_host, redis_port, haste_url):
         self.r = redis.Redis(host=redis_host, port=redis_port, db=0)
+        self.haste_url = haste_url

     def createCacheKey(self, id):
         return f"gpt-history-user-{id}"
@@ -44,11 +46,10 @@ class History():
         for i in range(0, len(history), 2):
             text += f"You: {history[i]['content']}\nGPT: {history[i+1]['content']}\n\n"

-        response = requests.post("https://haste.snrd.eu/documents", data=text)
-
-        if response.status_code != 200:
-            return "Could not export the ChatGPT history! Please try again later!"
-        else:
-            haste = json.loads(response.text)['key']
-            msg = "Your ChatGPT history: https://haste.snrd.eu/raw/{}".format(haste)
-            return msg
+        async with aiohttp.request("POST", self.haste_url, data=text) as response:
+            if response.status != 200:
+                return "Could not export the ChatGPT history! Please try again later!"
+            else:
+                haste = await response.json()
+                msg = "Your ChatGPT history: https://haste.snrd.eu/raw/{}".format(haste["key"])
+                return msg

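The export path now posts the rendered history to the configurable `haste_url` and builds the share link from the `key` field of the JSON reply, the usual hastebin-style contract. A minimal sketch of that round trip as a standalone coroutine, assuming a hastebin-compatible server; the raw-link base mirrors the one hard-coded in the diff:

```python
import asyncio

import aiohttp


async def upload_history(haste_url: str, text: str) -> str:
    # Hastebin-style servers answer POST /documents with JSON like {"key": "abc123"}.
    async with aiohttp.request("POST", haste_url, data=text) as response:
        if response.status != 200:
            return "Could not export the ChatGPT history! Please try again later!"
        haste = await response.json()
        return "Your ChatGPT history: https://haste.snrd.eu/raw/{}".format(haste["key"])


# Illustrative call (performs a real HTTP request):
# asyncio.run(upload_history("https://haste.snrd.eu/documents", "You: hi\nGPT: hello\n\n"))
```

Note that only the upload endpoint is configurable; the raw-link base stays hard coded, so pointing `haste_url` at a different server would still produce haste.snrd.eu links.
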
View file

@@ -1,6 +1,6 @@
 maubot: 0.1.0
 id: sh.boehm.gpt
-version: 0.0.02
+version: 0.0.04
 license: MIT
 modules:
 - gpt

BIN  sh.boehm.gpt-v0.0.4.mbp (new file)
Binary file not shown.