Spaces: BinaryONe
Commit 0d8a300 · Parent(s): dc937f8 · committed by BinaryONe
Temporary Upload feature
FileStream/Database/database.py
CHANGED
@@ -162,15 +162,27 @@ class Database:
 
 
     # ---------------------[ FIND FILE IN DB for Bot and APIs]---------------------#
-    async def get_file(self, _id):
-        try:
-            file_info = await self.files.find_one({"_id": ObjectId(_id)})
-            if not file_info:
-                print('file not found')
-                #raise FileNotFound
-            return file_info
-        except InvalidId:
-            raise FileNotFound
+    async def get_file(self, _id, privacy_type:str ):
+        if privacy_type == "PUBLIC":
+            try:
+                file_info = await self.files.find_one({"_id": ObjectId(_id)})
+                if not file_info:
+                    print('file not found')
+                    #raise FileNotFound
+                return file_info
+            except InvalidId:
+                raise FileNotFound
+        elif privacy_type == "TEMPORARY":
+            try:
+                file_info = await self.tfiles.find_one({"_id": ObjectId(_id)})
+                if not file_info:
+                    print('file not found')
+                    #raise FileNotFound
+                return file_info
+            except InvalidId:
+                raise FileNotFound
+        else:
+            return None
 
     async def get_all_files_api(self,range=None):
         #files = self.files.find({})

@@ -227,8 +239,11 @@ class Database:
             {"file.tagged_users." + str(user_id): "PRIVATE"})
         return user_files, total_files
 
-    async def get_file_by_fileuniqueid_only(self, file_unique_id):
-        return await self.files.find_one({"file.file_unique_id": file_unique_id})
+    async def get_file_by_fileuniqueid_only(self, file_unique_id, privacy_type:str):
+        if privacy_type=="TEMPORARY":
+            return await self.files.find_one({"file.file_unique_id": file_unique_id})
+        else:
+            return await self.files.find_one({"file.file_unique_id": file_unique_id})
 
     async def get_file_by_fileuniqueid(self, id, file_unique_id):
         count = await self.files.count_documents({"user_id":id,"file.file_unique_id":file_unique_id})

@@ -242,36 +257,67 @@ class Database:
     # ---------------------[ UPDATE FILE IN DB ]---------------------#
 
     async def update_privacy(self, file_details: dict):
-        file = await self.get_file_by_fileuniqueid_only(file_details['file']['file_unique_id'])
+        file = await self.get_file_by_fileuniqueid_only(file_details['file']['file_unique_id'],file_details['privacy_type'])
         # Merge the tagged_user dictionaries
         updated_tagged_users = file['file']['tagged_users'].copy()
         updated_tagged_users.update(file_details['file']['tagged_users'])
         #for value in updated_tagged_users.values():
         #    if value == "PRIVATE":
         #        file_details['privacy_type']=="PRIVATE"
-        file_details['privacy_type'] = "PRIVATE" if any(value == "PRIVATE" for value in updated_tagged_users.values()) else file_details['privacy_type']
-
-        await self.files.update_one({"_id": file['_id']}, {
-            "$set": {
-                "privacy_type": file_details['privacy_type'],
-                "file.tagged_users": updated_tagged_users
-            }
-        })
+        if file_details['privacy_type']=="TEMPORARY":
+            #file_details['privacy_type'] = "PRIVATE" if any(value == "PRIVATE" for value in updated_tagged_users.values()) else file_details['privacy_type']
+            await self.tfiles.update_one({"_id": file['_id']}, {
+                "$set": {
+                    "privacy_type": file_details['privacy_type'],
+                    "file.tagged_users": updated_tagged_users
+                }
+            })
+            return await self.get_file_by_fileuniqueid_only(file_details['file']['file_unique_id'],file_details['privacy_type'])
+        else:
+            file_details['privacy_type'] = "PRIVATE" if any(value == "PRIVATE" for value in updated_tagged_users.values()) else file_details['privacy_type']
+
+            await self.files.update_one({"_id": file['_id']}, {
+                "$set": {
+                    "privacy_type": file_details['privacy_type'],
+                    "file.tagged_users": updated_tagged_users
+                }
+            })
+            return await self.get_file_by_fileuniqueid_only(file_details['file']['file_unique_id'],file_details['privacy_type'])
+
+    async def update_file_ids(self, _id, file_ids: dict, privacy_type:str):
+        if privacy_type=="PUBLIC":
+            await self.files.update_one({"_id": ObjectId(_id)},
+                                        {"$set": {
+                                            "file_ids": file_ids
+                                        }})
+        elif privacy_type=="TEMPORARY":
+            await self.tfiles.update_one({"_id": ObjectId(_id)},
+                                         {"$set": {
+                                             "file_ids": file_ids
+                                         }})
+        else:
+            return None
 
-    async def update_file_ids(self, _id, file_ids: dict):
-        await self.files.update_one({"_id": ObjectId(_id)},
-                                    {"$set": {
-                                        "file_ids": file_ids
-                                    }})
+    async def update_file_info(self, _id, file_info: dict, privacy_type:str):
+        if privacy_type=="PUBLIC":
+            await self.files.update_one({"_id": ObjectId(_id)}, {
+                "$set": {
+                    "message_id": file_info['message_id'],
+                    "location": file_info['location'],
+                    "file": file_info['file']
+                }
+            })
+        elif privacy_type=="TEMPORARY":
+            await self.tfiles.update_one({"_id": ObjectId(_id)}, {
+                "$set": {
+                    "message_id": file_info['message_id'],
+                    "location": file_info['location'],
+                    "file": file_info['file']
+                }
+            })
+        else:
+            return None
 
-    async def update_file_info(self, _id, file_info: dict):
-        await self.files.update_one({"_id": ObjectId(_id)}, {
-            "$set": {
-                "message_id": file_info['message_id'],
-                "location": file_info['location'],
-                "file": file_info['file']
-            }
-        })
 
 
     #--------------------------PrivateFiles-------------------

@@ -301,7 +347,7 @@ class Database:
                             }})
 
     async def update_private_privacy(self, file_details: dict, instruction: dict):
-        file = await self.get_file_by_fileuniqueid_only(file_details['file']['file_unique_id'])
+        file = await self.get_file_by_fileuniqueid_only(file_details['file']['file_unique_id'],file_details['privacy_type'])
         await self.pfile.insert_one(file_details)
 
     #####################-------search for inline query ------------###############
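For orientation, a minimal usage sketch of the privacy-aware Database methods introduced above; the import path, the db instance and the dict contents are assumptions inferred from calls elsewhere in this commit, not part of the diff. Note that as committed, both branches of get_file_by_fileuniqueid_only query self.files; only get_file, update_privacy, update_file_ids and update_file_info switch to self.tfiles for TEMPORARY uploads.

# Hypothetical sketch (not part of the commit): how the new privacy_type
# parameter routes reads and writes between the permanent (files) and
# temporary (tfiles) collections. Import path and field values are assumptions.
from FileStream.Database.database import Database

async def demo(db: Database, db_id: str, file_details: dict):
    temp_doc = await db.get_file(db_id, "TEMPORARY")   # reads db.tfiles
    pub_doc = await db.get_file(db_id, "PUBLIC")       # reads db.files

    # Merges tagged_users, writes to the collection selected by
    # file_details['privacy_type'], then re-reads the document.
    updated = await db.update_privacy(file_details)

    # Both update helpers now require the privacy_type as well.
    await db.update_file_info(db_id, {"message_id": 1, "location": 0, "file": {}}, "TEMPORARY")
    await db.update_file_ids(db_id, {"0": "placeholder-file-id"}, "TEMPORARY")
    return temp_doc, pub_doc, updated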
FileStream/Tools/cleanup.py
CHANGED
@@ -1,6 +1,29 @@
 import re
 
 # words_to_remove = ["FC","HEVC","ɴᴀᴍᴇ:","-","BuLMoviee" ,"𝗝𝗼𝗶𝗻 𝗨𝘀 𝗢𝗻 𝗧𝗲𝗹𝗲𝗴𝗿𝗮�","SIDHUU 591","𝑱𝒐𝒊𝒏 𝑼𝒔 𝑶ɴ 𝑻ᴇʟᴇɢʀᴀᴍ","Tɪᴛʟᴇ :"]
+words_to_remove = [
+    "Fɪʟᴇ",
+    "Fɪʟᴇ ɴᴀᴍᴇ :",
+    "FC",
+    "HEVC",
+    "ɴᴀᴍᴇ",
+    "Tɪᴛʟᴇ :",
+    "-",
+    "BuLMoviee",
+    "𝗝𝗼𝗶𝗻 𝗨𝘀 𝗢𝗻 𝗧𝗲𝗹𝗲𝗴𝗿𝗮�",
+    "𝗝𝗼𝗶𝗻 𝗨𝘀 𝗢𝗻 𝗧𝗲𝗹𝗲𝗴𝗿𝗮𝗺",
+    "SIDHUU 591",
+    "𝑱𝒐𝒊𝒏 𝑼𝒔 𝑶ɴ 𝑻ᴇʟᴇɢʀᴀᴍ",
+    "Tɪᴛʟᴇ :",
+    "Bollywood",
+    "mkv",
+    "Mᴏᴠɪᴇ",
+    "ɢʀᴏᴜᴘ",
+    "TGxMALLU_MOVIE",
+    "[Tg-@New_Movies_OnTG]",
+    "[@ClipmateEmpire]",
+    "@Horek_Rokom2020"
+]
 
 def remove_words(text, words_to_remove):
     # Join the words to remove into a single regex pattern
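The hunk above only introduces the words_to_remove list; apart from its first comment, the body of remove_words lies outside this diff. A minimal sketch of a regex-based remove_words consistent with that comment, offered as an assumption rather than the repository's actual implementation:

import re

def remove_words(text, words_to_remove):
    # Join the words to remove into a single alternation pattern,
    # escaping regex metacharacters such as "[", "@" and "-".
    pattern = "|".join(re.escape(word) for word in words_to_remove)
    cleaned = re.sub(pattern, "", text)
    # Collapse whatever whitespace the removed tokens leave behind.
    return re.sub(r"\s+", " ", cleaned).strip()

# Example: remove_words("[@ClipmateEmpire] Some Title HEVC mkv", words_to_remove)
# strips the channel tag and the codec/container tokens from the caption.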
FileStream/utils/FileProcessors/bot_utils.py
CHANGED
@@ -98,7 +98,7 @@ async def upload_type_func(file_info,replied_message):
     user_id = file_info['user_id']
     message_id = file_info['message_id']
     """
-    existing_file = await db.get_file_by_fileuniqueid_only(file_info['file']['file_unique_id'])
+    existing_file = await db.get_file_by_fileuniqueid_only(file_info['file']['file_unique_id'],file_info['privacy_type'])
     if existing_file :
 
         reply_markup, stream_text = await gen_link(existing_file['_id'])
FileStream/utils/FileProcessors/file_properties.py
CHANGED
@@ -18,9 +18,9 @@ from FileStream.Tools.cleanup import clean_text,clean_string_special
 db = Database(Telegram.DATABASE_URL, Telegram.SESSION_NAME)
 
 
-async def send_file(client: Client, db_id, file_id: str, message, send_to):
+async def send_file(client: Client, db_id, file_info , message, send_to):
     file_caption = getattr(message, 'caption', None) or get_name(message)
-    log_msg = await client.send_cached_media(chat_id=send_to, file_id=file_id, caption=f"**{file_caption}**")
+    log_msg = await client.send_cached_media(chat_id=send_to, file_id= file_info['file']['file_id'] , caption=f"**{file_caption}**")
 
     if message.chat.type == ChatType.PRIVATE:
         await log_msg.reply_text(

@@ -30,9 +30,9 @@ async def send_file(client: Client, db_id, file_id: str, message, send_to):
             parse_mode=ParseMode.MARKDOWN,
             quote=True)
     else:
-        await log_msg.
+        await log_msg.edit_text(
             text=
-                f"**
+                f"**{file_info['type']} :{file_info['title']} ReleaseDate {file_info['release_date']} \n Description{file_info['description']} \n Genre {file_info['genre']}\n\nRᴇQᴜᴇꜱᴛᴇᴅ ʙʏ :** {message.chat.title} \n**Cʜᴀɴɴᴇʟ ɪᴅ :** `{message.chat.id}`\n**Fɪʟᴇ ɪᴅ :** `{db_id}`",
             disable_web_page_preview=True,
             parse_mode=ParseMode.MARKDOWN,
             quote=True)

@@ -80,10 +80,10 @@ async def get_file_ids(client: Client | bool, db_id: str, message) -> Optional[F
             print("Already Present in Data Sources ", Telegram.DATA_SOURCES)
         else:
             source = Telegram.DFLOG_CHANNEL if file_info['privacy_type']=="PUBLIC" else Telegram.FLOG_CHANNEL
-            log_msg = await send_file(FileStream, db_id, file_info
+            log_msg = await send_file(FileStream, db_id, file_info, message, source)
             #updated_info = update_file_info(log_msg)
-            await db.update_file_info(db_id, update_file_info(log_msg))
-            await db.update_file_ids(db_id, await update_file_id(await db.get_file(db_id),MULTI_CLIENTS))
+            await db.update_file_info(db_id, update_file_info(log_msg), file_info['privacy_type'])
+            await db.update_file_ids(db_id, await update_file_id(await db.get_file(db_id,file_info['privacy_type']),MULTI_CLIENTS), file_info['privacy_type'])
 
             logging.debug("Stored file_id of all clients in DB")
             if not client:
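Taken together, these changes mean send_file now receives the whole file_info document instead of a bare file_id, and get_file_ids threads privacy_type through db.get_file, db.update_file_info and db.update_file_ids. A rough sketch of the shape file_info is expected to have for the new caption and calls; the keys come from the diff, the values are placeholders:

# Hypothetical file_info document implied by the new send_file/get_file_ids
# code paths (keys taken from the diff above; values are placeholders).
file_info = {
    "privacy_type": "TEMPORARY",   # selects FLOG_CHANNEL and the tfiles collection
    "type": "Movie",
    "title": "Example Title",
    "release_date": "2024",
    "description": "Example description",
    "genre": "Drama",
    "file": {
        "file_id": "placeholder-file-id",        # passed to client.send_cached_media
        "file_unique_id": "placeholder-unique-id",
    },
}

# log_msg = await send_file(FileStream, db_id, file_info, message, source)
# await db.update_file_info(db_id, update_file_info(log_msg), file_info['privacy_type'])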
|