import os
import google.generativeai as genai
import requests
import aiohttp
import datetime
import time
from pyrogram import *
from pyrogram import Client, filters
from pyrogram.types import *
from RyuzakiLib import FaceAI, FullStackDev, GeminiLatest, RendyDevChat

from akn.utils.chat import chat_message
from akn.utils.database import db
from akn.utils.handler import *
from akn.utils.logger import LOGS
from akn.utils.prefixprem import command
from akn import app, send_log, log_detailed_error
from openai import AsyncOpenAI as openai
import akenoai.openai as akeno
from config import *

from . import *
|
|
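# Helper: send the prompt to OpenAI (gpt-4o-mini) through the akenoai wrapper
# and return its response unchanged.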
async def openailatest(message_str):
    BASE_PROMPT = f"""
You are Akeno AI, a Python assistant powered by @xtdevs on Telegram, with GPT-5-ULTRA language model support.
- Answer off-topic questions freely
- Convert code from other languages into Python `requests` examples
- Fix spelling mistakes

{datetime.datetime.now()}
"""

    response = await akeno.OpenAI.run(
        api_key_openai,
        openai_meta=openai,
        model="gpt-4o-mini-2024-07-18",
        messages=[
            {"role": "system", "content": BASE_PROMPT},
            {"role": "user", "content": message_str}
        ]
    )
    return response
|
|
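# Helper: POST the prompt to the self-hosted MistralAI endpoint; returns the
# parsed JSON on success or None on a non-200 response.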
async def mistraai(messagestr):
    url = "https://private-akeno.randydev.my.id/akeno/mistralai"
    payload = {"args": messagestr}
    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=payload) as response:
            if response.status != 200:
                return None
            return await response.json()
|
|
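# Helper: POST the prompt to the legacy ChatGPT endpoint; returns the parsed
# JSON on success or None on a non-200 response.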
async def chatgptold(messagestr):
    url = "https://private-akeno.randydev.my.id/ryuzaki/chatgpt-old"
    payload = {"query": messagestr}
    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=payload) as response:
            if response.status != 200:
                return None
            return await response.json()
|
|
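# .dev (self only): forward a bug report or module request to @xtdevs,
# together with the chat and sender details.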
@Akeno(
    ~filters.scheduled
    & command(["dev"])
    & filters.me
    & ~filters.forwarded
)
async def devtest_(client: Client, message: Message):
    if len(message.command) > 1:
        prompt = message.text.split(maxsplit=1)[1]
    elif message.reply_to_message:
        prompt = message.reply_to_message.text
    else:
        return await message.reply_text("Example: `.dev ytv not working` or `.dev request add module`")
    await app.send_message(
        "@xtdevs",
        f"Report to Group: `{message.chat.id}` | {message.chat.title}\n"
        f"UserID: `{message.from_user.id}`\n"
        f"FirstName: `{message.from_user.first_name}`\n"
        f"Text Bug: `{prompt}`"
    )
    await message.reply_text("Your bug report has been sent to the developer. Thank you! Please check back soon.")
|
|
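# .mistralai (self) / .cmai (user 1191668125): ask MistralAI and reply with the
# answer, sending it as a document when it exceeds Telegram's 4096-char limit.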
@Akeno(
    ~filters.scheduled
    & command(["cmai"])
    & filters.user(1191668125)
    & ~filters.me
    & ~filters.forwarded
)
@Akeno(
    ~filters.scheduled
    & command(["mistralai"])
    & filters.me
    & ~filters.forwarded
)
async def mistralai_(client: Client, message: Message):
    if len(message.command) > 1:
        prompt = message.text.split(maxsplit=1)[1]
    elif message.reply_to_message:
        prompt = message.reply_to_message.text
    else:
        return await message.reply_text("Please provide a question for MistralAI.")
    try:
        messager = await mistraai(prompt)
        if messager is None:
            return await message.reply_text("No response")
        output = messager["randydev"].get("message")
        if len(output) > 4096:
            with open("chat.txt", "w+", encoding="utf8") as out_file:
                out_file.write(output)
            await message.reply_document(
                document="chat.txt",
                disable_notification=True
            )
            os.remove("chat.txt")
        else:
            await message.reply_text(output)
    except Exception as e:
        await log_detailed_error(e, where=client.me.id, who=message.chat.title)
        await message.reply_text(str(e))
|
|
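# .askold (self) / .caskold (user 1191668125): query the legacy ChatGPT
# endpoint and reply, falling back to a document for long answers.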
@Akeno(
    ~filters.scheduled
    & command(["caskold"])
    & filters.user(1191668125)
    & ~filters.me
    & ~filters.forwarded
)
@Akeno(
    ~filters.scheduled
    & command(["askold"])
    & filters.me
    & ~filters.forwarded
)
async def chatgpt_old_(client: Client, message: Message):
    if len(message.command) > 1:
        prompt = message.text.split(maxsplit=1)[1]
    elif message.reply_to_message:
        prompt = message.reply_to_message.text
    else:
        return await message.reply_text("Please provide a question for ChatGPT-3.")
    try:
        messager = await chatgptold(prompt)
        if messager is None:
            return await message.reply_text("No response")
        output = messager["randydev"].get("message")
        if len(output) > 4096:
            with open("chat.txt", "w+", encoding="utf8") as out_file:
                out_file.write(output)
            await message.reply_document(
                document="chat.txt",
                disable_notification=True
            )
            os.remove("chat.txt")
        else:
            await message.reply_text(output)
    except Exception as e:
        await log_detailed_error(e, where=client.me.id, who=message.chat.title)
        await message.reply_text(str(e))
|
|
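# .ask (self) / .cask (user 1191668125): send the prompt to OpenAI via
# openailatest() and reply, using a document for answers over 4096 chars.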
@Akeno(
    ~filters.scheduled
    & command(["cask"])
    & filters.user(1191668125)
    & ~filters.me
    & ~filters.forwarded
)
@Akeno(
    ~filters.scheduled
    & command(["ask"])
    & filters.me
    & ~filters.forwarded
)
async def askren(client: Client, message: Message):
    if len(message.command) > 1:
        prompt = message.text.split(maxsplit=1)[1]
    elif message.reply_to_message:
        prompt = message.reply_to_message.text
    else:
        return await message.reply_text("Please provide a question for GPT-5.")
    try:
        output = await openailatest(prompt)
        if len(output) > 4096:
            with open("chat.txt", "w+", encoding="utf8") as out_file:
                out_file.write(output)
            await message.reply_document(
                document="chat.txt",
                disable_notification=True
            )
            os.remove("chat.txt")
        else:
            await message.reply_text(output)
    except Exception as e:
        await log_detailed_error(e, where=client.me.id, who=message.chat.title)
        await message.reply_text(str(e))
|
|
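# Register the commands with the built-in help module.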
module = modules_help.add_module("chatgpt", __file__)
module.add_command("ask", "ask a question to ChatGPT-4o")
module.add_command("askold", "ask a question to ChatGPT-3")
module.add_command("mistralai", "ask a question to MistralAI")