privateone committed
Commit faa9726 · 1 Parent(s): 2a429c7

Code Updates & Optimisations: Clearing up ACTIVE_CLIENTS
FileStream/server/Functions/downloader.py CHANGED
@@ -1,8 +1,8 @@
 import os
 import time
 import math
-import asyncio
 import logging
+import asyncio
 import traceback
 from aiohttp import web
 from pyrogram import raw
@@ -14,17 +14,14 @@ from FileStream.config import Telegram
 from FileStream.bot import req_client, FileStream
 from FileStream import utils, StartTime, __version__
 from FileStream.Tools import mime_identifier, Time_ISTKolNow
-from FileStream.utils.FileProcessors.custom_ul import TeleUploader
-from FileStream.server.exceptions import FIleNotFound, InvalidHash
 from FileStream.bot import MULTI_CLIENTS, WORK_LOADS, ACTIVE_CLIENTS
+from FileStream.server.exceptions import FIleNotFound, InvalidHash
 from FileStream.server.render_template import render_page, render_upload
-
+from FileStream.utils.FileProcessors.custom_ul import TeleUploader


 async def media_streamer(request: web.Request, db_id: str, speed: str):
-    """
-    Handle streaming of media files to the client.
-    """
+    # Get the Range header from the request, default to 0 if not present
     range_header = request.headers.get("Range", "bytes=0-")
     client = await req_client()

@@ -32,23 +29,22 @@ async def media_streamer(request: web.Request, db_id: str, speed: str):
     if Telegram.MULTI_CLIENT:
         logging.info(f"Client {client['index']} is now serving {request.headers.get('X-FORWARDED-FOR', request.remote)}")

-    # Get or create the ByteStreamer object
+    # Use an existing ByteStreamer or create a new one
     tg_connect = ACTIVE_CLIENTS.get(client['client'], None)

     if tg_connect is None:
         logging.debug(f"Creating new ByteStreamer object for client {client['index']}")
         tg_connect = utils.ByteStreamer(client['client'])
-        ACTIVE_CLIENTS[client['client']] = {
-            "last_activity": asyncio.get_event_loop().time(),  # Store current timestamp
-            "streamer": tg_connect
-        }
+        ACTIVE_CLIENTS[client['client']] = tg_connect
+
     else:
+        tg_connect.update_last_activity()
         logging.debug(f"Using cached ByteStreamer object for client {client['index']}")

     try:
         # Fetch file properties once and use it throughout
         logging.debug("Fetching file properties")
-        file_id = await tg_connect["streamer"].get_file_properties(db_id, MULTI_CLIENTS)
+        file_id = await tg_connect.get_file_properties(db_id, MULTI_CLIENTS)
         file_size = file_id.file_size

         # Parse range header efficiently
@@ -63,7 +59,7 @@ async def media_streamer(request: web.Request, db_id: str, speed: str):
         )

         # Set chunk size based on speed
-        chunk_size = 1024 * 1024 if speed == "FAST" else 512 * 1024
+        chunk_size = 1024 * 1024 if speed == "FAST" else 512 * 1024

         # Ensure we don't go past the file size
         until_bytes = min(until_bytes, file_size - 1)
@@ -72,7 +68,7 @@ async def media_streamer(request: web.Request, db_id: str, speed: str):
         offset, first_part_cut, last_part_cut, part_count = compute_offsets(from_bytes, until_bytes, chunk_size)

         # Request the file chunks
-        body = tg_connect["streamer"].yield_file(
+        body = tg_connect.yield_file(
             file_id, client['index'], offset, first_part_cut, last_part_cut, part_count, chunk_size
         )
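The key change above is that ACTIVE_CLIENTS now caches the ByteStreamer object itself, with the last-activity timestamp living on the streamer, instead of wrapping both in a dict. Below is a minimal, self-contained sketch of that reuse pattern; StreamerStub and serve_request are illustrative stand-ins, not FileStream code, and only mimic the caching behaviour under that assumption.

import asyncio

ACTIVE_CLIENTS = {}  # client key -> cached streamer, mirroring the new scheme


class StreamerStub:
    """Illustrative stand-in for utils.ByteStreamer as changed by this commit."""

    def __init__(self, client):
        self.client = client
        self.last_activity = asyncio.get_event_loop().time()

    def update_last_activity(self):
        self.last_activity = asyncio.get_event_loop().time()


async def serve_request(client_key):
    # Reuse the cached streamer if one exists, otherwise create and cache it
    streamer = ACTIVE_CLIENTS.get(client_key)
    if streamer is None:
        streamer = StreamerStub(client_key)
        ACTIVE_CLIENTS[client_key] = streamer  # cache the object itself, not a wrapper dict
    else:
        streamer.update_last_activity()  # refresh the timestamp on every reuse
    return streamer


async def main():
    first = await serve_request("client-0")
    second = await serve_request("client-0")
    assert first is second            # the second request reuses the cached streamer
    print(len(ACTIVE_CLIENTS))        # 1


asyncio.run(main())

Caching the object directly also removes the tg_connect["streamer"] indirection the old code paid on every request.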
 
FileStream/server/__init__.py CHANGED
@@ -18,9 +18,10 @@ async def clear_inactive_clients():
         await asyncio.sleep(INACTIVITY_TIMEOUT)  # Check every INACTIVITY_TIMEOUT seconds

         now = asyncio.get_event_loop().time()  # Get current time
+
         inactive_clients = [
-            client for client, data in ACTIVE_CLIENTS.items()
-            if now - data["last_activity"] > INACTIVITY_TIMEOUT  # Compare with last_activity timestamp
+            client_id for client_id, tg_connect in ACTIVE_CLIENTS.items()
+            if now - tg_connect.last_activity > INACTIVITY_TIMEOUT  # Compare with last_activity timestamp
         ]

         for client in inactive_clients:
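With the timestamp now living on the streamer itself, the cleanup task only has to read tg_connect.last_activity. The sketch below shows one pass of such a sweep under simplified assumptions: clear_inactive_clients_once, StreamerStub and the 1-second INACTIVITY_TIMEOUT are illustrative, while the real task sleeps and loops as shown in the diff.

import asyncio

INACTIVITY_TIMEOUT = 1  # seconds; shortened here so the example finishes quickly
ACTIVE_CLIENTS = {}


class StreamerStub:
    """Stand-in exposing only the last_activity attribute the sweep reads."""

    def __init__(self):
        self.last_activity = asyncio.get_event_loop().time()


async def clear_inactive_clients_once():
    # One pass of the sweep: drop every client whose streamer has been idle too long
    now = asyncio.get_event_loop().time()
    inactive = [
        client_id for client_id, tg_connect in ACTIVE_CLIENTS.items()
        if now - tg_connect.last_activity > INACTIVITY_TIMEOUT
    ]
    for client_id in inactive:
        ACTIVE_CLIENTS.pop(client_id, None)  # the real loop would also tear down the client
    return inactive


async def main():
    ACTIVE_CLIENTS["idle"] = StreamerStub()
    ACTIVE_CLIENTS["busy"] = StreamerStub()
    await asyncio.sleep(1.5)
    ACTIVE_CLIENTS["busy"].last_activity = asyncio.get_event_loop().time()  # simulate a recent request
    removed = await clear_inactive_clients_once()
    print(removed)               # ['idle']
    print(list(ACTIVE_CLIENTS))  # ['busy']


asyncio.run(main())

Because the sweep only reads last_activity, anything cached in ACTIVE_CLIENTS just needs to expose that attribute.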
FileStream/utils/FileProcessors/custom_dl.py CHANGED
@@ -1,26 +1,41 @@
+import os
+import time
+import math
 import asyncio
 import logging
-from typing import Dict, Union
-from pyrogram.types import Message
-from pyrogram import Client, utils, raw
-from pyrogram.session import Session, Auth
-from pyrogram.errors import AuthBytesInvalid
-from pyrogram.file_id import FileId, FileType, ThumbnailSource
+import traceback
+from aiohttp import web
+from pyrogram import raw
+from aiohttp.http_exceptions import BadStatusLine

-#--------------------Local Imports -------------------------------#
+#---------------------Local Upload---------------------#

-from .file_properties import get_file_ids
-from FileStream.bot import WORK_LOADS
+from FileStream.config import Telegram
+from FileStream.bot import req_client, FileStream
+from FileStream import utils, StartTime, __version__
+from FileStream.Tools import mime_identifier, Time_ISTKolNow
+from FileStream.utils.FileProcessors.custom_ul import TeleUploader
+from FileStream.server.exceptions import FIleNotFound, InvalidHash
+from FileStream.bot import MULTI_CLIENTS, WORK_LOADS, ACTIVE_CLIENTS
+from FileStream.server.render_template import render_page, render_upload


 class ByteStreamer:
-
     def __init__(self, client: Client):
         self.clean_timer = 30 * 60  # Cache cleanup timer set to 30 minutes
         self.client: Client = client
         self.cached_file_ids: Dict[str, FileId] = {}  # Cache to store file properties by db_id
+        self.last_activity: float = asyncio.get_event_loop().time()  # Track last activity time for the client
         asyncio.create_task(self.clean_cache())  # Start the cache cleanup task

+    def update_last_activity(self):
+        """Update the last activity time to the current time."""
+        self.last_activity = asyncio.get_event_loop().time()
+
+    def get_last_activity(self) -> float:
+        """Get the last activity time of this client."""
+        return self.last_activity
+
     async def get_file_properties(self, db_id: str, MULTI_CLIENTS) -> FileId:
         """
         Returns the properties of a media of a specific message in a FileId class.
@@ -206,6 +221,15 @@ class ByteStreamer:
         """
         while True:
             await asyncio.sleep(self.clean_timer)  # Wait for the cleanup interval
-            logging.debug("Cleaning cached file properties...")
+            logging.debug("*** Cleaning cached file properties...")
             self.cached_file_ids.clear()  # Clear the cache
             logging.debug("Cache cleaned.")
+
+    @staticmethod
+    def is_active(last_activity: float, timeout: float = 120) -> bool:
+        """
+        Determines if the client is active based on the last activity time.
+        If the client has been inactive for longer than the timeout period (default 30 minutes),
+        they are considered inactive.
+        """
+        return (asyncio.get_event_loop().time() - last_activity) < timeout
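Taken together, the new ByteStreamer members reduce to a small activity-tracking pattern: stamp the event-loop time at construction, refresh it on each reuse, and compare the age against a timeout to decide liveness. Below is a minimal standalone sketch of just that pattern; ActivityTracker is an illustrative name, and the 120-second default copies the committed is_active signature (whose docstring still mentions 30 minutes).

import asyncio


class ActivityTracker:
    """Illustrative reduction of the last_activity bookkeeping added to ByteStreamer."""

    def __init__(self):
        self.last_activity: float = asyncio.get_event_loop().time()

    def update_last_activity(self) -> None:
        self.last_activity = asyncio.get_event_loop().time()

    def get_last_activity(self) -> float:
        return self.last_activity

    @staticmethod
    def is_active(last_activity: float, timeout: float = 120) -> bool:
        # Active means the last recorded activity is newer than `timeout` seconds ago
        return (asyncio.get_event_loop().time() - last_activity) < timeout


async def main():
    tracker = ActivityTracker()
    print(ActivityTracker.is_active(tracker.get_last_activity()))                # True, just created
    await asyncio.sleep(0.2)
    print(ActivityTracker.is_active(tracker.get_last_activity(), timeout=0.1))   # False, idle longer than the timeout
    tracker.update_last_activity()
    print(ActivityTracker.is_active(tracker.get_last_activity(), timeout=0.1))   # True again after the refresh


asyncio.run(main())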