Update Akeno/plugins/chatgpt.py
Akeno/plugins/chatgpt.py  +54 -0  CHANGED
@@ -18,6 +18,7 @@
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
 
 import requests
+import json
 from pyrogram import *
 from pyrogram import Client, filters
 from pyrogram.types import *
@@ -47,6 +48,59 @@ async def chatgptold(messagestr):
         return None
     return response.json()
 
+def akeno_ai_send_message(message):
+    url = "https://ai.randydev.my.id/backend-api/v2/conversation"
+    payload = {
+        "api_key": "",
+        "auto_continue": True,
+        "conversation_id": "c527f012-ae20-47c1-881e-914617b5d995",
+        "id": "7411834416398216880",
+        "model": "",
+        "provider": "",
+        "messages": [{"role": "user", "content": message}]
+    }
+    response = requests.post(url, json=payload)
+    response_lines = response.text.strip().split("\n")
+    contents = []
+    for line in response_lines:
+        try:
+            json_obj = json.loads(line)
+            if json_obj.get("type") == "content":
+                contents.append(json_obj.get("content"))
+        except json.JSONDecodeError:
+            return f"Error: {line}"
+    full_content = ''.join(contents)
+    return full_content
+
+@Akeno(
+    ~filters.scheduled
+    & filters.command(["askakeno"], CMD_HANDLER)
+    & filters.me
+    & ~filters.forwarded
+)
+async def akenoai_(client: Client, message: Message):
+    if len(message.command) > 1:
+        prompt = message.text.split(maxsplit=1)[1]
+    elif message.reply_to_message:
+        prompt = message.reply_to_message.text
+    else:
+        return await message.reply_text("Please give a question to ask, or reply to a message.")
+    try:
+        response = akeno_ai_send_message(prompt)
+        if len(response) > 4096:
+            with open("chat.txt", "w+", encoding="utf8") as out_file:
+                out_file.write(response)
+            await message.reply_document(
+                document="chat.txt",
+                disable_notification=True
+            )
+            os.remove("chat.txt")
+        else:
+            await message.reply_text(response)
+    except Exception as e:
+        LOGS.error(str(e))
+        return await message.reply_text(str(e))
+
 @Akeno(
     ~filters.scheduled
     & filters.command(["addchatbot"], CMD_HANDLER)
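
For reference, below is a minimal standalone sketch of the same request/parse flow that akeno_ai_send_message implements. It assumes the backend at ai.randydev.my.id streams newline-delimited JSON objects shaped like {"type": "content", "content": "..."}; that format is inferred from the parsing loop in the diff, not from provider documentation. The ask() wrapper name, the timeout argument, and the raise_for_status() call are additions for the sketch only.

# Standalone sketch: mirrors the request/parse flow of akeno_ai_send_message().
import json

import requests

API_URL = "https://ai.randydev.my.id/backend-api/v2/conversation"  # same endpoint as the diff


def ask(prompt: str, timeout: float = 60.0) -> str:
    # Same payload shape as the plugin; the empty api_key/model/provider
    # fields are kept as they appear in the diff.
    payload = {
        "api_key": "",
        "auto_continue": True,
        "conversation_id": "c527f012-ae20-47c1-881e-914617b5d995",
        "id": "7411834416398216880",
        "model": "",
        "provider": "",
        "messages": [{"role": "user", "content": prompt}],
    }
    resp = requests.post(API_URL, json=payload, timeout=timeout)
    resp.raise_for_status()

    # Assumed response format: one JSON object per line; objects with
    # type == "content" carry the text fragments to concatenate.
    parts = []
    for line in resp.text.strip().splitlines():
        if not line:
            continue
        try:
            obj = json.loads(line)
        except json.JSONDecodeError:
            return f"Error: {line}"
        if obj.get("type") == "content":
            parts.append(obj.get("content", ""))
    return "".join(parts)


if __name__ == "__main__":
    print(ask("Hello!"))

In the handler, the 4096-character threshold matches Telegram's maximum message length, which is why longer replies are written to chat.txt and sent back as a document instead of a text message.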