# terabox_utils.py
"""Helpers for resolving Terabox share links and downloading the target files.

Flow: extract the short share id from a URL, ask the configured worker
(``config.TERABOX_WORKER_URL``) for a direct download URL + filename, then
stream the file to the local ``downloads/`` directory.
"""

import asyncio
import logging
import math
import os
import re
import time
from functools import partial

import requests

import config

logger = logging.getLogger(__name__)

DOWNLOAD_DIR = "downloads"
os.makedirs(DOWNLOAD_DIR, exist_ok=True)

# --- Utility ---


def format_bytes(size_bytes: int) -> str:
    """Return a human-readable size string, e.g. ``1.5 MB``.

    Non-positive sizes collapse to ``"0 B"``. Sizes beyond TB are clamped
    to the TB unit rather than raising.
    """
    if size_bytes <= 0:
        return "0 B"
    units = ("B", "KB", "MB", "GB", "TB")
    # log base 1024 picks the unit; clamp so huge values stay within `units`.
    i = min(int(math.log(size_bytes, 1024)), len(units) - 1)
    value = round(size_bytes / math.pow(1024, i), 2)
    return f"{value} {units[i]}"


# --- Terabox ---

# Known Terabox mirror hosts. Compiled once at import time so repeated
# calls to extract_terabox_short_id don't recompile them.
_SHORT_ID_PATTERNS = [
    re.compile(p, re.I)
    for p in (
        r'terabox\.com/s/([a-zA-Z0-9_-]+)',
        r'teraboxapp\.com/s/([a-zA-Z0-9_-]+)',
        r'1024tera\.com/s/([a-zA-Z0-9_-]+)',
        r'freeterabox\.com/s/([a-zA-Z0-9_-]+)',
        r'terabox\.com/sharing/link\?surl=([a-zA-Z0-9_-]+)',
        r'terasharelink\.com/s/([a-zA-Z0-9_-]+)',
        r'4funbox\.com/s/([a-zA-Z0-9_-]+)',
        r'box-links\.com/s/([a-zA-Z0-9_-]+)',
    )
]


async def extract_terabox_short_id(full_url: str):
    """Return the share id from a Terabox-family URL, or None if no host matches.

    Kept ``async`` for interface compatibility with existing callers even
    though the work itself is synchronous.
    """
    for pattern in _SHORT_ID_PATTERNS:
        if m := pattern.search(full_url):
            return m.group(1)
    return None


async def get_final_url_and_filename(original_link: str):
    """Resolve a share link via the worker service.

    Returns a ``(download_url, file_name, error)`` triple; exactly one side
    is populated — on success ``error`` is None, on failure the first two
    are None and ``error`` is a human-readable string.
    """
    payload = {"link": original_link}
    headers = {"User-Agent": "Mozilla/5.0"}
    try:
        loop = asyncio.get_running_loop()
        # requests is blocking; run it in the default thread pool so the
        # event loop stays responsive.
        response = await loop.run_in_executor(
            None,
            partial(
                requests.post,
                config.TERABOX_WORKER_URL,
                headers=headers,
                json=payload,
                timeout=30,
            ),
        )
        response.raise_for_status()
        data = response.json()
        download_url = data.get("proxy_url")
        file_name = data.get("file_name")
        if data.get("error") or not download_url or not file_name:
            return None, None, data.get('error', 'Worker returned incomplete data.')
        return download_url, file_name, None
    except Exception as e:
        return None, None, str(e)


def _blocking_download(url: str, download_path: str) -> int:
    """Synchronously stream ``url`` to ``download_path``; return bytes written.

    Runs inside an executor thread (see download_terabox_file) — it must
    never be called directly from the event loop. The ``with`` on the
    response guarantees the streaming connection is closed (the original
    code leaked it).
    """
    headers = {
        "User-Agent": (
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
            "(KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36"
        ),
        "Accept": "*/*",
        "Referer": "https://teraboxapp.com/",
    }
    with requests.get(
        url,
        headers=headers,
        stream=True,
        timeout=(10, 300),  # (connect, read) timeouts
        allow_redirects=True,
    ) as response:
        response.raise_for_status()
        written = 0
        with open(download_path, 'wb') as f:
            for chunk in response.iter_content(chunk_size=1024 * 1024):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)
                    written += len(chunk)
        return written


async def download_terabox_file(url: str, filename: str):
    """Download ``url`` into the downloads directory under a sanitized name.

    Returns ``(path, None)`` on success or ``(None, error_message)`` on
    failure; any partially written file is removed on failure.
    """
    # Strip filesystem-hostile characters and bound the name length.
    safe_fn = re.sub(r'[\\/*?:"<>|]', "_", filename)[:200]
    # Timestamp prefix avoids collisions between downloads of the same name.
    download_path = os.path.join(DOWNLOAD_DIR, f"{int(time.time())}_{safe_fn}")
    try:
        loop = asyncio.get_running_loop()
        # The whole streaming download is blocking, so it runs in a worker
        # thread — the original looped over iter_content inside the
        # coroutine, stalling the event loop for the entire transfer.
        dl_size = await loop.run_in_executor(
            None, partial(_blocking_download, url, download_path)
        )
        logger.info("Downloaded %s (%s)", safe_fn, format_bytes(dl_size))
        return download_path, None
    except Exception as e:
        # Remove any partial file so a retry starts clean.
        if os.path.exists(download_path):
            os.remove(download_path)
        return None, str(e)