randydev committed on
Commit
6a05ecc
·
verified ·
1 Parent(s): e94c78d

Update Akeno/__main__.py

Browse files
Files changed (1) hide show
  1. Akeno/__main__.py +71 -260
Akeno/__main__.py CHANGED
@@ -1,272 +1,83 @@
1
- import asyncio
2
- import logging
3
- import os
 
 
 
 
 
 
 
4
 
5
- from pyrogram import Client, filters
6
- from pyrogram import *
7
- from pyrogram.types import Message
 
 
 
 
8
 
9
- # Your other imports
10
- from dotenv import load_dotenv
11
- from database import db
12
- from logger import LOGS
13
- from RyuzakiLib import GeminiLatest # and other imports as needed
14
- import google.generativeai as genai
15
- from google.api_core.exceptions import InvalidArgument
16
 
17
- # Load environment variables
18
- load_dotenv()
19
- API_ID = os.getenv("API_ID")
20
- API_HASH = os.getenv("API_HASH")
21
- BOT_TOKEN = os.getenv("BOT_TOKEN")
22
- GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
23
 
24
- # Validate essential environment variables
25
- if not all([API_ID, API_HASH, BOT_TOKEN, GOOGLE_API_KEY]):
26
- LOGS.critical("Missing one or more essential environment variables.")
27
- exit(1)
28
 
29
- # Initialize Pyrogram Client
30
- client = Client(
31
- "chatbotai",
32
- api_id=API_ID,
33
- api_hash=API_HASH,
34
- bot_token=BOT_TOKEN
35
- )
36
 
37
- # Define your handler
38
- @client.on_message(
39
- filters.incoming
40
- & (
41
- filters.text
42
- | filters.photo
43
- | filters.video
44
- | filters.audio
45
- | filters.voice
46
- | filters.regex(r"\b(Randy|Rendi)\b(.*)", flags=re.IGNORECASE)
47
- )
48
- & filters.private
49
- & ~filters.via_bot
50
- & ~filters.forwarded,
51
- group=2,
52
- )
53
- async def chatbot_talk(client: Client, message: Message):
54
  try:
55
-
56
- genai.configure(api_key=GOOGLE_API_KEY)
57
-
58
- # Handling Photo Messages
59
- if message.photo:
60
- file_path = await message.download()
61
- caption = message.caption or "What's this?"
62
- x = GeminiLatest(api_keys=GOOGLE_API_KEY)
63
-
64
- # Send initial processing message
65
- ai_reply = await message.reply_text("Processing...")
66
-
67
- try:
68
- backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
69
- backup_chat.append({"role": "user", "parts": [{"text": caption}]})
70
-
71
- response_reads = x.get_response_image(caption, file_path)
72
-
73
- if len(response_reads) > 4096:
74
- with open("chat.txt", "w+", encoding="utf8") as out_file:
75
- out_file.write(response_reads)
76
- await message.reply_document(
77
- document="chat.txt",
78
- disable_notification=True
79
- )
80
- await ai_reply.delete()
81
- os.remove("chat.txt")
82
- else:
83
- await ai_reply.edit_text(response_reads)
84
-
85
- backup_chat.append({"role": "model", "parts": [{"text": response_reads}]})
86
- await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
87
-
88
- os.remove(file_path)
89
- return
90
- except InvalidArgument as e:
91
- await ai_reply.edit_text(f"Error: {e}")
92
- return
93
- except Exception as e:
94
- await ai_reply.edit_text(f"Error: {e}")
95
- return
96
-
97
- # Handling Audio or Voice Messages
98
- if message.audio or message.voice:
99
- ai_reply = await message.reply_text("Processing...")
100
- audio_file_name = await message.download()
101
- caption = message.caption or "What's this?"
102
- model = genai.GenerativeModel(
103
- model_name="gemini-1.5-flash",
104
- safety_settings={
105
- genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
106
- genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
107
- genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
108
- genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
109
- }
110
- )
111
- backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
112
- backup_chat.append({"role": "user", "parts": [{"text": caption}]})
113
-
114
- ai_reply.edit_text("Uploading file...")
115
- audio_file = genai.upload_file(path=audio_file_name)
116
-
117
- while audio_file.state.name == "PROCESSING":
118
- await asyncio.sleep(10)
119
- audio_file = genai.get_file(audio_file.name)
120
-
121
- if audio_file.state.name == "FAILED":
122
- await ai_reply.edit_text(f"Error: {audio_file.state.name}")
123
- return
124
-
125
  try:
126
- response = model.generate_content(
127
- [audio_file, caption],
128
- request_options={"timeout": 600}
129
- )
130
- if len(response.text) > 4096:
131
- with open("chat.txt", "w+", encoding="utf8") as out_file:
132
- out_file.write(response.text)
133
- await message.reply_document(
134
- document="chat.txt",
135
- disable_notification=True
136
- )
137
- await ai_reply.delete()
138
- os.remove("chat.txt")
139
- else:
140
- await ai_reply.edit_text(response.text)
141
-
142
- backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
143
- await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
144
-
145
- audio_file.delete()
146
- os.remove(audio_file_name)
147
- return
148
- except InvalidArgument as e:
149
- await ai_reply.edit_text(f"Error: {e}")
150
- return
151
  except Exception as e:
152
- await ai_reply.edit_text(f"Error: {e}")
153
- return
154
-
155
- # Handling Video Messages
156
- if message.video:
157
- ai_reply = await message.reply_text("Processing...")
158
- video_file_name = await message.download(file_name="newvideo.mp4")
159
- caption = message.caption or "What's this?"
160
- model = genai.GenerativeModel(
161
- model_name="gemini-1.5-pro",
162
- safety_settings={
163
- genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
164
- genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
165
- genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
166
- genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
167
- }
168
- )
169
- backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
170
- backup_chat.append({"role": "user", "parts": [{"text": caption}]})
171
-
172
- ai_reply.edit_text("Uploading file...")
173
- video_file = genai.upload_file(path=video_file_name)
174
-
175
- while video_file.state.name == "PROCESSING":
176
- await asyncio.sleep(10)
177
- video_file = genai.get_file(video_file.name)
178
-
179
- if video_file.state.name == "FAILED":
180
- await ai_reply.edit_text(f"Error: {video_file.state.name}")
181
- return
182
-
183
  try:
184
- response = model.generate_content(
185
- [video_file, caption],
186
- request_options={"timeout": 600}
187
- )
188
- if len(response.text) > 4096:
189
- with open("chat.txt", "w+", encoding="utf8") as out_file:
190
- out_file.write(response.text)
191
- await message.reply_document(
192
- document="chat.txt",
193
- disable_notification=True
194
- )
195
- await ai_reply.delete()
196
- os.remove("chat.txt")
197
- else:
198
- await ai_reply.edit_text(response.text)
199
-
200
- backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
201
- await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
202
-
203
- video_file.delete()
204
- os.remove(video_file_name)
205
- return
206
- except InvalidArgument as e:
207
- await ai_reply.edit_text(f"Error: {e}")
208
- return
209
- except Exception as e:
210
- await ai_reply.edit_text(f"Error: {e}")
211
- return
212
-
213
- # Handling Text Messages
214
- if message.text:
215
- query = message.text.strip()
216
- match = re.search(r"\b(Randy|Rendi)\b(.*)", query, flags=re.IGNORECASE)
217
- if match:
218
- rest_of_sentence = match.group(2).strip()
219
- query_base = rest_of_sentence if rest_of_sentence else query
220
- else:
221
- query_base = query
222
-
223
- parts = query.split(maxsplit=1)
224
- command = parts[0].lower()
225
- pic_query = parts[1].strip() if len(parts) > 1 else ""
226
-
227
- try:
228
- model_flash = genai.GenerativeModel(
229
- model_name="gemini-1.5-flash"
230
- )
231
- backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
232
- backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
233
-
234
- chat_session = model_flash.start_chat(history=backup_chat)
235
- response_data = chat_session.send_message(query_base)
236
- output = response_data.text
237
-
238
- if len(output) > 4096:
239
- with open("chat.txt", "w+", encoding="utf8") as out_file:
240
- out_file.write(output)
241
- await message.reply_document(
242
- document="chat.txt",
243
- disable_notification=True
244
- )
245
- os.remove("chat.txt")
246
- else:
247
- await message.reply_text(output)
248
-
249
- backup_chat.append({"role": "model", "parts": [{"text": output}]})
250
- await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
251
- except Exception as e:
252
- await message.reply_text(str(e))
253
- # End of handler
254
-
255
- # Define the main coroutine
256
- async def main():
257
- await db.connect() # Connect to your database
258
- LOGS.info("Connected to the database.")
259
- await client.start() # Start the Pyrogram client
260
- LOGS.info("Bot started successfully.")
261
- await idle() # Keep the bot running until interrupted
262
- LOGS.info("Bot stopping...")
263
- await client.stop() # Ensure the client stops gracefully
264
 
265
- # Entry point
266
  if __name__ == "__main__":
267
- try:
268
- asyncio.run(main())
269
- except (KeyboardInterrupt, SystemExit):
270
- LOGS.info("Bot has been terminated by the user.")
271
- except Exception as e:
272
- LOGS.error(f"Unexpected error: {e}")
 
1
+ #!/usr/bin/env python
2
+ # -*- coding: utf-8 -*-
3
+ # Copyright 2020-2023 (c) Randy W @xtdevs, @xtsea
4
+ #
5
+ # from : https://github.com/TeamKillerX
6
+ # Channel : @RendyProjects
7
+ # This program is free software: you can redistribute it and/or modify
8
+ # it under the terms of the GNU Affero General Public License as published by
9
+ # the Free Software Foundation, either version 3 of the License, or
10
+ # (at your option) any later version.
11
 
12
+ # This program is distributed in the hope that it will be useful,
13
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
14
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15
+ # GNU Affero General Public License for more details.
16
+ #
17
+ # You should have received a copy of the GNU Affero General Public License
18
+ # along with this program. If not, see <https://www.gnu.org/licenses/>.
19
 
20
import asyncio
import importlib
import logging
import sys
from contextlib import closing, suppress
from importlib import import_module

from pyrogram import idle
from pyrogram.errors import *
from uvloop import install

from Akeno import clients
from Akeno.plugins import ALL_MODULES
from Akeno.utils.database import db
from Akeno.utils.logger import LOGS

logging.basicConfig(level=logging.INFO)
# Silence noisy pyrogram internals while keeping this bot's INFO logs visible.
logging.getLogger("pyrogram.syncer").setLevel(logging.WARNING)
logging.getLogger("pyrogram.client").setLevel(logging.WARNING)

# Install the uvloop event-loop policy BEFORE creating the loop: calling
# install() only inside the __main__ guard is too late, because the default
# asyncio loop has already been created here at import time and would be used
# for the whole program.
install()
# asyncio.get_event_loop() is deprecated outside a running loop (3.10+);
# create and register the loop explicitly instead.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
 
 
 
40
 
41
async def main():
    """Start the Akeno userbot and block until it is stopped.

    Connects the database, imports every plugin module (importing a plugin
    registers its handlers as a side effect), starts each Pyrogram client,
    announces the start in Saved Messages, joins the support channel, then
    parks in ``idle()``. All outstanding tasks are cancelled on the way out.
    """
    try:
        await db.connect()
        for module_name in ALL_MODULES:
            # Importing registers the plugin's handlers; the returned module
            # object itself is not needed afterwards.
            import_module(f"Akeno.plugins.{module_name}")
        for cli in clients:
            try:
                await cli.start()
            except (SessionExpired, ApiIdInvalid, UserDeactivated, AuthKeyDuplicated) as e:
                # Fatal credential/session problems — nothing to recover from.
                LOGS.error(f"Error {e}")
                sys.exit(1)
            except Exception as e:
                # Unknown startup failure: log and skip this client rather than
                # calling get_me() on a client that never started.
                LOGS.error(f"Error starting userbot: {e}")
                continue
            ex = await cli.get_me()
            LOGS.info(f"Started {ex.first_name}")
            await cli.send_message("me", "Starting Akeno Userbot")
            try:
                await cli.join_chat("RendyProjects")
            except UserIsBlocked:
                return LOGS.info("You have been blocked. Please support @xtdevs")
        await idle()
    except Exception as e:
        LOGS.error(f"Error in main: {e}")
    finally:
        # Cancel every outstanding task EXCEPT this one — cancelling the
        # current task would raise CancelledError inside this finally block.
        for task in asyncio.all_tasks():
            if task is not asyncio.current_task():
                task.cancel()
        LOGS.info("All tasks completed successfully!")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77
 
 
78
if __name__ == "__main__":
    # Switch asyncio over to uvloop's faster event-loop implementation.
    install()
    # closing() guarantees loop.close() even if main() raises.
    with closing(loop):
        # CancelledError leaks out of main() when its finally block cancels
        # tasks; KeyboardInterrupt is the normal Ctrl-C exit — suppress both
        # so shutdown stays graceful and the loop is still closed.
        with suppress(asyncio.exceptions.CancelledError, KeyboardInterrupt):
            loop.run_until_complete(main())
            # Give cancelled tasks a moment to finish unwinding before the
            # loop is closed.
            loop.run_until_complete(asyncio.sleep(3.0))