import os
import json
import logging
import asyncio
import traceback

import aiohttp_cors
from aiohttp import web
from aiohttp.http_exceptions import BadStatusLine
from pyrogram import raw
from bson import ObjectId
from bson.json_util import dumps

#---------------------Local Imports----------------------------------#
from FileStream.bot import req_client
from ..Functions import media_streamer
from FileStream.Database import Database
from FileStream.config import Telegram, Server
from FileStream.Exceptions import FileNotFound, InvalidHash
from FileStream.APIs.TMDB.Endpoint import search_tmdb_any, search_tmdb_tv, search_tmdb_movies
# Upload endpoint for Dropzone.js chunked uploads.
async def upload_file(request: web.Request) -> web.Response:
    """
    Handle one Dropzone.js chunk upload and forward it to Telegram.

    Reads a single multipart chunk from the POST body, builds the
    file-details dict expected by ``TeleUploader.upload_web_file``,
    and uploads it via the pooled client returned by ``req_client()``.

    NOTE: ``file.read()`` loads the entire *chunk* into memory in one
    call; its size is bounded by Dropzone's "dzchunksize", not the
    total file size, so memory use stays bounded per request.

    Returns:
        JSON response with the uploader's "status" and "message";
        400 JSON error when the multipart body has no "file" field.
    """
    data = await request.post()

    upload = data.get('file')
    if upload is None:
        # Malformed request: without this guard a missing "file" field
        # raised AttributeError (HTTP 500) instead of a clean 400.
        return web.json_response(
            {"status": "error", "message": "missing 'file' field"},
            status=400,
        )

    chunk = upload.file.read()  # one Dropzone chunk (≤ dzchunksize bytes)

    # Dropzone's per-upload UUID doubles as all the Telegram-side ids.
    dz_uuid = str(data["dzuuid"])
    file_name = str(upload.filename)

    file_details = dict(
        user_id="thebinary1",       # fixed pseudo-user for web uploads
        dropzone_id=dz_uuid,
        file=dict(
            file_id=dz_uuid,
            file_unique_id=dz_uuid,
            file_name=file_name,
            file_size=int(data["dztotalfilesize"]),
            # mime_identifier is defined elsewhere in this module/project
            # — presumably maps an extension to a MIME type; verify.
            mime_type=mime_identifier(file_name),
            part_size=int(data["dzchunksize"]),
            file_part=int(data["dzchunkindex"]),
            total_parts=int(data["dztotalchunkcount"]),
        ),
        # Time_ISTKolNow is defined elsewhere — looks like an
        # Asia/Kolkata timestamp helper; confirm against its definition.
        time=Time_ISTKolNow(),
        user_type="WEB",
        privacy_type="PRIVATE",
    )

    client_req = await req_client()  # pooled Telegram client wrapper
    tg_connect = TeleUploader(client_req["client"])
    result = await tg_connect.upload_web_file(file_details, chunk)

    return web.json_response({
        "status": result.get("status"),
        "message": result.get("message"),
    })