privateone committed
Commit 8fde8ae · 1 Parent(s): 6026e10

Testing Changes

FileStream/server/Functions/downloader.py CHANGED
@@ -22,22 +22,23 @@ from FileStream.utils.FileProcessors.custom_ul import TeleUploader
 async def media_streamer(request: web.Request, db_id: str, speed: str):
     # Get the Range header from the request, default to 0 if not present
     range_header = request.headers.get("Range", 0)
-
-    index = min(WORK_LOADS, key=WORK_LOADS.get)
-    faster_client = MULTI_CLIENTS[index]
+    client = await req_client()
 
     # Log client info if multi-client mode
-    if Telegram.MULTI_CLIENT:
-        logging.info(f"Client {index} is now serving {request.headers.get('X-FORWARDED-FOR', request.remote)}")
+    if Telegram.MULTI_CLIENT:
+        logging.info(f"Client {client['index']} is now serving {request.headers.get('X-FORWARDED-FOR', request.remote)}")
 
-    if faster_client in ACTIVE_CLIENTS:
-        tg_connect = ACTIVE_CLIENTS[faster_client]
-        logging.debug(f"Using cached ByteStreamer object for client {index}")
+    # Use an existing ByteStreamer or create a new one
+    tg_connect = ACTIVE_CLIENTS.get(client['client'], None)
+
+    if tg_connect is None:
+        logging.debug(f"Creating new ByteStreamer object for client {client['index']}")
+        tg_connect = utils.ByteStreamer(client['client'])
+        ACTIVE_CLIENTS[client['client']] = tg_connect
     else:
-        logging.debug(f"Creating new ByteStreamer object forclient {index}")
-        tg_connect = utils.ByteStreamer(faster_client)
-        ACTIVE_CLIENTS[faster_client] = tg_connect
+        tg_connect.update_last_activity()
+        logging.debug(f"Using cached ByteStreamer object for client {client['index']}")
+
     try:
         # Fetch file properties once and use it throughout
         logging.debug("Fetching file properties")
@@ -67,7 +68,7 @@ async def media_streamer(request: web.Request, db_id: str, speed: str):
 
     # Request the file chunks
     body = tg_connect.yield_file(
-        file_id, index, offset, first_part_cut, last_part_cut, part_count, chunk_size
+        file_id, client['index'], offset, first_part_cut, last_part_cut, part_count, chunk_size
     )
 
     # Determine MIME type and filename
@@ -107,7 +108,6 @@ def parse_range(range_header: str, file_size: int):
         #from_bytes = request.http_range.start or 0
         #until_bytes = (request.http_range.stop or file_size) - 1
         return from_bytes, until_bytes
-
     except ValueError:
         return None, None
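The rewritten downloader.py delegates client selection to req_client(), whose implementation is not part of this diff. A minimal sketch of what such a helper could look like, assuming it simply picks the least-loaded entry the way the old inline code did with min(WORK_LOADS, key=WORK_LOADS.get) and returns the {'index': ..., 'client': ...} mapping the streamer consumes; the module-level dictionaries below are stand-ins, not the project's actual definitions:

from typing import Any, Dict

# Stand-ins for the bot's real registries: index -> pyrogram Client, index -> active stream count.
MULTI_CLIENTS: Dict[int, Any] = {}
WORK_LOADS: Dict[int, int] = {}

async def req_client() -> Dict[str, Any]:
    # Pick the client index carrying the smallest current workload.
    index = min(WORK_LOADS, key=WORK_LOADS.get)
    # A real implementation would likely also bump WORK_LOADS[index] here.
    return {"index": index, "client": MULTI_CLIENTS[index]}

With that shape, media_streamer only needs client['index'] for logging and the yield_file call, and client['client'] as the ACTIVE_CLIENTS cache key.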
FileStream/server/Functions/{downloader copy.py → downloader_copy.py} RENAMED
@@ -22,23 +22,22 @@ from FileStream.utils.FileProcessors.custom_ul import TeleUploader
 async def media_streamer(request: web.Request, db_id: str, speed: str):
     # Get the Range header from the request, default to 0 if not present
     range_header = request.headers.get("Range", 0)
-    client = await req_client()
+
+    index = min(WORK_LOADS, key=WORK_LOADS.get)
+    faster_client = MULTI_CLIENTS[index]
 
     # Log client info if multi-client mode
-    if Telegram.MULTI_CLIENT:
-        logging.info(f"Client {client['index']} is now serving {request.headers.get('X-FORWARDED-FOR', request.remote)}")
+    if Telegram.MULTI_CLIENT:
+        logging.info(f"Client {index} is now serving {request.headers.get('X-FORWARDED-FOR', request.remote)}")
 
-    # Use an existing ByteStreamer or create a new one
-    tg_connect = ACTIVE_CLIENTS.get(client['client'], None)
-
-    if tg_connect is None:
-        logging.debug(f"Creating new ByteStreamer object for client {client['index']}")
-        tg_connect = utils.ByteStreamer(client['client'])
-        ACTIVE_CLIENTS[client['client']] = tg_connect
+    if faster_client in ACTIVE_CLIENTS:
+        tg_connect = ACTIVE_CLIENTS[faster_client]
+        logging.debug(f"Using cached ByteStreamer object for client {index}")
     else:
-        tg_connect.update_last_activity()
-        logging.debug(f"Using cached ByteStreamer object for client {client['index']}")
-
+        logging.debug(f"Creating new ByteStreamer object forclient {index}")
+        tg_connect = utils.ByteStreamer(faster_client)
+        ACTIVE_CLIENTS[faster_client] = tg_connect
     try:
         # Fetch file properties once and use it throughout
         logging.debug("Fetching file properties")
@@ -68,7 +67,7 @@ async def media_streamer(request: web.Request, db_id: str, speed: str):
 
     # Request the file chunks
     body = tg_connect.yield_file(
-        file_id, client['index'], offset, first_part_cut, last_part_cut, part_count, chunk_size
+        file_id, index, offset, first_part_cut, last_part_cut, part_count, chunk_size
     )
 
     # Determine MIME type and filename
@@ -108,6 +107,7 @@ def parse_range(range_header: str, file_size: int):
         #from_bytes = request.http_range.start or 0
         #until_bytes = (request.http_range.stop or file_size) - 1
         return from_bytes, until_bytes
+
     except ValueError:
         return None, None
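Both versions of the streamer finish by calling parse_range(range_header, file_size), whose body sits largely outside these hunks; only the from_bytes/until_bytes return and the ValueError fallback are visible. Purely as an illustration of what that visible skeleton implies for the usual "bytes=start-end" header (and the integer 0 default used when no Range header is present), a parser might look like this; the project's actual function may differ:

def parse_range(range_header, file_size: int):
    # Illustrative sketch only; accepts "bytes=0-1023", "bytes=500-", or the 0 default.
    try:
        if not range_header:
            return 0, file_size - 1
        unit, _, spec = str(range_header).partition("=")
        if unit.strip().lower() != "bytes":
            return None, None
        start, _, end = spec.partition("-")
        from_bytes = int(start) if start else 0
        until_bytes = int(end) if end else file_size - 1
        return from_bytes, until_bytes
    except ValueError:
        return None, None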
FileStream/utils/FileProcessors/custom_dl.py CHANGED
@@ -5,7 +5,7 @@ import asyncio
 import logging
 import traceback
 from aiohttp import web
-from typing import Dict, Union
+from typing import Dict, Union, Optional
 from pyrogram.types import Message
 from pyrogram import Client, utils, raw
 from pyrogram.session import Session, Auth
@@ -29,7 +29,7 @@ class ByteStreamer:
         self.clean_timer = 30 * 60  # Cache cleanup timer set to 30 minutes
         self.client: Client = client
         self.cached_file_ids: Dict[str, FileId] = {}  # Cache to store file properties by db_id
-        self.last_activity: float = asyncio.get_event_loop().time()  # Track last activity time for the client
+        self.last_activity: float = Optional[float]  # Track last activity time for the client
         asyncio.create_task(self.clean_cache())  # Start the cache cleanup task
 
     def update_last_activity(self):
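custom_dl.py now imports Optional and stops stamping last_activity with the event-loop time at construction, which pairs with the new tg_connect.update_last_activity() call in downloader.py. A minimal sketch of how the attribute, update_last_activity() and the clean_cache() task could fit together under that reading, assuming last_activity starts as None and is refreshed on every cached reuse; only the attribute and method names and the 30-minute clean_timer come from the diff, the bodies are illustrative:

import asyncio
from typing import Dict, Optional

class ByteStreamer:
    def __init__(self, client):
        self.clean_timer = 30 * 60                    # cache cleanup interval: 30 minutes
        self.client = client
        self.cached_file_ids: Dict[str, object] = {}  # file properties keyed by db_id (FileId in the real code)
        self.last_activity: Optional[float] = None    # stamped on first use rather than at construction
        asyncio.create_task(self.clean_cache())       # start the background cleanup task

    def update_last_activity(self):
        # Record the current event-loop time whenever a cached streamer serves a request.
        self.last_activity = asyncio.get_event_loop().time()

    async def clean_cache(self):
        # Periodically drop cached file properties so idle entries do not accumulate.
        while True:
            await asyncio.sleep(self.clean_timer)
            self.cached_file_ids.clear()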