Upload 5 files

- tvproxy-main/Dockerfile (+30 -0)
- tvproxy-main/DockerfileHF (+33 -0)
- tvproxy-main/README.md (+295 -0)
- tvproxy-main/app.py (+1040 -0)
- tvproxy-main/requirements.txt (+7 -0)

tvproxy-main/Dockerfile (ADDED)
@@ -0,0 +1,30 @@

# Use the Python base image
FROM python:3.12-slim

# Set the working directory
WORKDIR /app

# Copy the required files
COPY requirements.txt .
COPY app.py .

# Install the dependencies
RUN pip install --upgrade pip
RUN pip install --no-cache-dir -r requirements.txt

# Install Gunicorn (in case it is not already in requirements.txt)
RUN pip install gunicorn

# Expose port 7860 for Flask/Gunicorn
EXPOSE 7860

# Command to start the Flask server with Gunicorn and 4 workers
CMD ["gunicorn", "app:app", \
     "-w", "4", \
     "--worker-class", "gevent", \
     "--worker-connections", "100", \
     "-b", "0.0.0.0:7860", \
     "--timeout", "120", \
     "--keep-alive", "5", \
     "--max-requests", "1000", \
     "--max-requests-jitter", "100"]

tvproxy-main/DockerfileHF (ADDED)
@@ -0,0 +1,33 @@

# Use the Python base image
FROM python:3.12-slim

# Install git and SSL certificates
RUN apt-get update && apt-get install -y \
    git \
    ca-certificates \
    && rm -rf /var/lib/apt/lists/*

# Set the working directory
WORKDIR /app

# Clone the GitHub repository
RUN git clone https://github.com/nzo66/tvproxy .
COPY . .

# Install the dependencies
RUN pip install --upgrade pip
RUN pip install --no-cache-dir -r requirements.txt

# Expose port 7860 for Flask/Gunicorn
EXPOSE 7860

# Optimized command to start the server
CMD ["gunicorn", "app:app", \
     "-w", "4", \
     "--worker-class", "gevent", \
     "--worker-connections", "100", \
     "-b", "0.0.0.0:7860", \
     "--timeout", "120", \
     "--keep-alive", "5", \
     "--max-requests", "1000", \
     "--max-requests-jitter", "100"]

tvproxy-main/README.md (ADDED)
@@ -0,0 +1,295 @@

# tvproxy 📺

A lightweight, dockerized proxy server built on **Flask** and **Requests**, designed to get around restrictions and access M3U/M3U8 streams without interruptions.

- 📥 **Downloads and rewrites** `.m3u` and `.m3u8` playlists on the fly.
- 🔁 **Proxies `.ts` segments** while keeping custom headers.
- 🚫 **Bypasses common restrictions** such as `Referer`, `User-Agent`, etc.
- 🐳 **Easy to run in Docker** on any machine, server, or cloud platform.

---

## 📚 Table of Contents

- Deployment Platforms
  - Render
  - HuggingFace
- Local Setup
  - Docker
  - Termux (Android)
  - Python
- Using the Proxy
- Proxy Configuration
- Docker Management

---

## ☁️ Deployment Platforms

### ▶️ Deploy on Render

1. Go to **Projects → New → Web Service → Public Git Repo**.
2. Enter the repository URL: `https://github.com/nzo66/tvproxy` and click **Connect**.
3. Choose any name you like for the service.
4. Set **Instance Type** to `Free` (or a paid option for better performance).
5. **(Optional) Configure the proxy environment variables:**
   * In the **Environment** section, add one or more variables.
   * **For a SOCKS5 proxy:**
     * **Key:** `SOCKS5_PROXY`
     * **Value:** `socks5://user:pass@host:port`
   * **For an HTTP/HTTPS proxy:**
     * **Key 1:** `HTTP_PROXY`
     * **Value 1:** `http://user:pass@host:port`
     * **Key 2:** `HTTPS_PROXY`
     * **Value 2:** `http://user:pass@host:port`
   * **Note:** You can list multiple proxies (of the same type) separated by commas. The script picks one at random for each request.
   * For more details, see the Proxy Configuration section.
6. Click **Create Web Service**.

### 🤗 Deploy on HuggingFace

1. Create a new **Space**.
2. Choose a name, select **Docker** as the SDK, and leave visibility set to **Public**.
3. Go to **Files** → `⋮` → **Upload file** and upload the `DockerfileHF` file from the repository, renaming it to **Dockerfile**.
4. **Configure the environment variables for the port:**
   * Go to your Space's **Settings**.
   * In the **Secrets** section, add a new secret.
5. **(Optional) Configure an HTTP/HTTPS proxy:**
   * HuggingFace Spaces **does not support SOCKS5 proxies**, but you can use HTTP/HTTPS proxies.
   * In the **Secrets** section, add the following secrets (both must be set):
     * **Secret 1:**
       * **Name:** `HTTP_PROXY`
       * **Value:** `http://user:pass@host:port,http://user:pass@host:port`
     * **Secret 2:**
       * **Name:** `HTTPS_PROXY`
       * **Value:** `http://user:pass@host:port,http://user:pass@host:port`
   * **Note:** Both variables must point to the same HTTP proxy URL.
6. Once the deploy completes, go to `⋮` → **Embed this Space** to get the **Direct URL**.

> 🔄 **Note:** If you update the proxy value or other variables, remember to run a "Factory Rebuild" from the Space to apply the changes.

---

## 💻 Local Setup

### 🐳 Docker (Local or Server)

#### Build and Run

1. **Clone the repository and build the Docker image:**
   ```bash
   git clone https://github.com/nzo66/tvproxy.git
   cd tvproxy
   docker build -t tvproxy .
   ```

2. **Start the container:**

   * **Without a proxy:**
     ```bash
     docker run -d -p 7860:7860 --name tvproxy tvproxy
     ```

   * **With a SOCKS5 proxy:**
     ```bash
     docker run -d -p 7860:7860 -e SOCKS5_PROXY="socks5://proxy1,socks5://proxy2" --name tvproxy tvproxy
     ```
   * **With an HTTP/HTTPS proxy:**
     ```bash
     docker run -d -p 7860:7860 -e HTTP_PROXY="http://proxy.example.com:8080,http://user:pass@host:port" -e HTTPS_PROXY="http://proxy.example.com:8080,http://user:pass@host:port" --name tvproxy tvproxy
     ```

### 🐧 Termux (Android Devices)

1. **Install the required packages:**
   ```bash
   pkg update && pkg upgrade
   pkg install git python nano -y
   ```

2. **Clone the repository and install the dependencies:**
   ```bash
   git clone https://github.com/nzo66/tvproxy.git
   cd tvproxy
   pip install -r requirements.txt
   ```

3. **(Optional) Configure a proxy via a `.env` file:**
   ```bash
   # Create and open the .env file with the nano editor
   nano .env
   ```
   Paste the configuration into the file. You can use SOCKS5 or HTTP/HTTPS proxies. Save with `Ctrl+X`, then `Y` and `Enter`.
   ```dotenv
   # Choose only one proxy type (SOCKS5 or HTTP/HTTPS).
   # Remove the comment (#) from the lines you want to use.

   # --- SOCKS5 proxies (one or more, comma-separated) ---
   # SOCKS5_PROXY="socks5://user:pass@host1:port,socks5://host2:port"

   # --- HTTP/HTTPS proxies (both must be set) ---
   # HTTP_PROXY="http://user:pass@host:port,http://user:pass@host:port"
   # HTTPS_PROXY="http://user:pass@host:port,http://user:pass@host:port"
   ```

4. **Start the server with Gunicorn:**
   ```bash
   gunicorn app:app -w 4 --worker-class gevent -b 0.0.0.0:7860
   ```
   > 👉 **Tip:** For a more robust start, you can add the extra parameters:
   > ```bash
   > gunicorn app:app -w 4 --worker-class gevent --worker-connections 100 -b 0.0.0.0:7860 --timeout 120 --keep-alive 5 --max-requests 1000 --max-requests-jitter 100
   > ```

### 🐍 Python (Local)

1. **Clone the repository:**
   ```bash
   git clone https://github.com/nzo66/tvproxy.git
   cd tvproxy
   ```

2. **Install the dependencies:**
   ```bash
   pip install -r requirements.txt
   ```

3. **(Optional) Configure a proxy via a `.env` file:**
   Create a `.env` file in the project root and add the proxy configuration. The script loads it automatically.
   ```bash
   # Example: create and edit the file with nano
   nano .env
   ```
   **Contents of the `.env` file:**
   ```dotenv
   # Choose only one proxy type (SOCKS5 or HTTP/HTTPS).
   # Remove the comment (#) from the lines you want to use.

   # --- SOCKS5 proxies (one or more, comma-separated) ---
   # SOCKS5_PROXY="socks5://user:pass@host1:port,socks5://host2:port"

   # --- HTTP/HTTPS proxies (both must be set) ---
   # HTTP_PROXY="http://user:pass@host:port,http://user:pass@host:port"
   # HTTPS_PROXY="http://user:pass@host:port,http://user:pass@host:port"
   ```

4. **Start the server with Gunicorn:**
   ```bash
   gunicorn app:app -w 4 --worker-class gevent --worker-connections 100 -b 0.0.0.0:7860 --timeout 120 --keep-alive 5 --max-requests 1000 --max-requests-jitter 100
   ```

---

## 🛠️ How to Use

Replace `<server-ip>` with your server's IP or hostname and `<URL_...>` with the URLs you want to proxy.

### 📡 Endpoint 1: Proxy for Full M3U Playlists

Ideal for proxying an entire M3U playlist, with compatibility for various formats (e.g. Vavoo, Daddylive).

**URL format:**
```text
http://<server-ip>:7860/proxy?url=<URL_LISTA_M3U>
```

### 📺 Endpoint 2: Proxy for Single M3U8 Streams (with Headers)

Designed to proxy a single `.m3u8` stream, with the option of adding custom HTTP headers to get past specific protections.

**Base URL format:**
```text
http://<server-ip>:7860/proxy/m3u?url=<URL_FLUSSO_M3U8>
```

**Adding custom headers (optional):**
To add headers, append them to the URL using the `&h_` prefix.

**Format:**
```text
&h_<HEADER_NAME>=<HEADER_VALUE>
```

**Full example with headers:**
```text
http://<server-ip>:7860/proxy/m3u?url=https://example.com/stream.m3u8&h_user-agent=VLC/3.0.20&h_referer=https://example.com/
```

> ⚠️ **Warning:** If the header values contain special characters, make sure they are properly **URL-encoded**.
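
A simple way to avoid hand-encoding each value is to let Python build the query string for you. The snippet below is only a minimal sketch of the URL format described above; the server address, stream URL, and header values are placeholders, not part of the project.

```python
from urllib.parse import quote

# Hypothetical values, used only to illustrate the &h_ format
server = "http://<server-ip>:7860"
stream_url = "https://example.com/stream.m3u8"
headers = {
    "user-agent": "VLC/3.0.20",
    "referer": "https://example.com/",
}

# Each header becomes an &h_<name>=<value> parameter; quote() URL-encodes the
# stream URL and the header values so special characters survive the query string.
params = f"url={quote(stream_url, safe='')}"
for name, value in headers.items():
    params += f"&h_{name}={quote(value, safe='')}"

print(f"{server}/proxy/m3u?{params}")
```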

---

## 🔒 Proxy Configuration

Using proxies is **completely optional**. If no proxy is specified, all requests are made directly from the server. Configure them only if you need to get around geo-blocks or network restrictions.

The script supports **SOCKS5**, **HTTP**, and **HTTPS** proxies via environment variables or a `.env` file.

### Environment Variables

| Variable       | Description                                                          | Example                                            |
| -------------- | -------------------------------------------------------------------- | -------------------------------------------------- |
| `SOCKS5_PROXY` | One or more SOCKS5 proxies, comma-separated.                          | `socks5://user:pass@host:port,socks5://host2:port` |
| `HTTP_PROXY`   | The HTTP proxy URL. Use it together with `HTTPS_PROXY`.               | `http://user:pass@host:port,http://host:port`      |
| `HTTPS_PROXY`  | The proxy URL for HTTPS requests. Usually the same as `HTTP_PROXY`.   | `http://user:pass@host:port,http://host:port`      |

### Example `.env` file (for local use)

Create a `.env` file in the project's root directory to configure proxies easily during local development. The script loads the variables automatically.

```dotenv
# Choose only one proxy type (SOCKS5 or HTTP/HTTPS).
# Remove the comment (#) from the lines you want to use.

# --- SOCKS5 proxies ---
# You can list one or more proxies, comma-separated.
# SOCKS5_PROXY="socks5://user:pass@host1:port,socks5://host2:port"

# --- HTTP/HTTPS proxies ---
# Both variables must be set to the same value.
# HTTP_PROXY="http://user:pass@host:port,http://host:port"
# HTTPS_PROXY="http://user:pass@host:port,http://host:port"
```
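
For reference, the rotation described above amounts to splitting the comma-separated value and picking one entry at random per request. This is only a sketch of that idea, not the app's exact code (`app.py` does this in `setup_proxies()` and `get_proxy_for_url()`):

```python
import os
import random

# Read the comma-separated proxy list from the environment, as in the .env example above
raw_proxies = os.environ.get("HTTP_PROXY", "")
proxy_list = [p.strip() for p in raw_proxies.split(",") if p.strip()]

def pick_proxy():
    """Return a requests-style proxies dict, or None when no proxy is configured."""
    if not proxy_list:
        return None
    chosen = random.choice(proxy_list)
    return {"http": chosen, "https": chosen}

print(pick_proxy())
```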

---

## 🔐 Recommended Proxy for Streaming

### 🌍 [proxy-cheap](https://www.proxy-cheap.com/): Dedicated Static Datacenter HTTP & SOCKS5

> ⚠️ **Important:** HuggingFace only supports **HTTP** proxies
> ✅ Ideal for: streaming, scraping, AI requests, automation

---

🎁 **Use the coupon code:** `NZO66`
💸 **Get 10% off before checkout!**

---

🔧 Reliable, fast, anonymous: perfect for anyone who wants stability from dedicated proxies.

---

## 🐳 Docker Management

- **View the logs:** `docker logs -f tvproxy`
- **Stop the container:** `docker stop tvproxy`
- **Start the container:** `docker start tvproxy`
- **Remove the container:** `docker rm -f tvproxy`

---

## ✅ Main Features

- ✅ Automatic support for `.m3u` and `.m3u8`.
- ✅ Forwarding of custom HTTP headers (`Authorization`, `Referer`, etc.).
- ✅ Bypass of geographic or access restrictions.
- ✅ Compatible with any IPTV player.
- ✅ Fully dockerized and ready to deploy.
- ✅ Can also be run directly with Python.

---

## 🎉 Enjoy the Stream!

> Now you can watch your favorite streams anywhere, without restrictions.

tvproxy-main/app.py (ADDED)
@@ -0,0 +1,1040 @@

from flask import Flask, request, Response, jsonify
import requests
from urllib.parse import urlparse, urljoin, quote, unquote
import re
import traceback
import json
import base64
from urllib.parse import quote_plus
import os
import random
import time
from cachetools import TTLCache, LRUCache
from dotenv import load_dotenv
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
import psutil
from threading import Thread, Lock
import weakref

app = Flask(__name__)

load_dotenv()

# --- General configuration ---
VERIFY_SSL = os.environ.get('VERIFY_SSL', 'false').lower() not in ('false', '0', 'no')
if not VERIFY_SSL:
    print("ATTENZIONE: La verifica del certificato SSL è DISABILITATA. Questo potrebbe esporre a rischi di sicurezza.")
    import urllib3
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Increased timeout to better handle large TS segments
REQUEST_TIMEOUT = int(os.environ.get('REQUEST_TIMEOUT', 30))
print(f"Timeout per le richieste impostato a {REQUEST_TIMEOUT} secondi.")

# Keep-Alive settings
KEEP_ALIVE_TIMEOUT = int(os.environ.get('KEEP_ALIVE_TIMEOUT', 300))  # 5 minutes
MAX_KEEP_ALIVE_REQUESTS = int(os.environ.get('MAX_KEEP_ALIVE_REQUESTS', 1000))
POOL_CONNECTIONS = int(os.environ.get('POOL_CONNECTIONS', 20))
POOL_MAXSIZE = int(os.environ.get('POOL_MAXSIZE', 50))

print(f"Keep-Alive configurato: timeout={KEEP_ALIVE_TIMEOUT}s, max_requests={MAX_KEEP_ALIVE_REQUESTS}")

# --- Global variables for system monitoring ---
system_stats = {
    'ram_usage': 0,
    'ram_used_gb': 0,
    'ram_total_gb': 0,
    'network_sent': 0,
    'network_recv': 0,
    'bandwidth_usage': 0
}

# Global session pool for persistent connections
SESSION_POOL = {}
SESSION_LOCK = Lock()

def get_system_stats():
    """Returns real-time system statistics"""
    global system_stats

    # RAM usage
    memory = psutil.virtual_memory()
    system_stats['ram_usage'] = memory.percent
    system_stats['ram_used_gb'] = memory.used / (1024**3)  # GB
    system_stats['ram_total_gb'] = memory.total / (1024**3)  # GB

    # Network usage
    net_io = psutil.net_io_counters()
    system_stats['network_sent'] = net_io.bytes_sent / (1024**2)  # MB
    system_stats['network_recv'] = net_io.bytes_recv / (1024**2)  # MB

    return system_stats

def monitor_bandwidth():
    """Monitors network bandwidth in the background"""
    global system_stats
    prev_sent = 0
    prev_recv = 0

    while True:
        try:
            net_io = psutil.net_io_counters()
            current_sent = net_io.bytes_sent
            current_recv = net_io.bytes_recv

            if prev_sent > 0 and prev_recv > 0:
                # Compute the bandwidth used in the last second (in MB/s)
                sent_per_sec = (current_sent - prev_sent) / (1024 * 1024)  # converted to MB/s
                recv_per_sec = (current_recv - prev_recv) / (1024 * 1024)  # converted to MB/s
                system_stats['bandwidth_usage'] = sent_per_sec + recv_per_sec

            prev_sent = current_sent
            prev_recv = current_recv
        except Exception as e:
            print(f"Errore nel monitoraggio banda: {e}")

        time.sleep(1)

def connection_manager():
    """Thread that manages persistent connections"""
    while True:
        try:
            time.sleep(300)  # Check every 5 minutes

            # Connection statistics
            with SESSION_LOCK:
                active_sessions = len(SESSION_POOL)
                print(f"Sessioni attive nel pool: {active_sessions}")

            # Periodic cleanup of inactive sessions
            if active_sessions > 10:  # Too many sessions: clean up
                cleanup_sessions()

        except Exception as e:
            print(f"Errore nel connection manager: {e}")

def cleanup_sessions():
    """Clears inactive sessions from the pool"""
    global SESSION_POOL, SESSION_LOCK

    with SESSION_LOCK:
        for key, session in list(SESSION_POOL.items()):
            try:
                session.close()
            except:
                pass
        SESSION_POOL.clear()
        print("Pool di sessioni pulito")

# Start the monitoring threads
bandwidth_thread = Thread(target=monitor_bandwidth, daemon=True)
bandwidth_thread.start()

connection_thread = Thread(target=connection_manager, daemon=True)
connection_thread.start()

# --- Proxy configuration ---
PROXY_LIST = []

def setup_proxies():
    """Loads the list of SOCKS5, HTTP, and HTTPS proxies from the environment variables."""
    global PROXY_LIST
    proxies_found = []

    socks_proxy_list_str = os.environ.get('SOCKS5_PROXY')
    if socks_proxy_list_str:
        raw_socks_list = [p.strip() for p in socks_proxy_list_str.split(',') if p.strip()]
        if raw_socks_list:
            print(f"Trovati {len(raw_socks_list)} proxy SOCKS5. Verranno usati a rotazione.")
            for proxy in raw_socks_list:
                final_proxy_url = proxy
                if proxy.startswith('socks5://'):
                    final_proxy_url = 'socks5h' + proxy[len('socks5'):]
                    print(f"Proxy SOCKS5 convertito per garantire la risoluzione DNS remota")
                elif not proxy.startswith('socks5h://'):
                    print(f"ATTENZIONE: L'URL del proxy SOCKS5 non è un formato SOCKS5 valido (es. socks5:// o socks5h://). Potrebbe non funzionare.")
                proxies_found.append(final_proxy_url)
            print("Assicurati di aver installato la dipendenza per SOCKS: 'pip install PySocks'")

    http_proxy_list_str = os.environ.get('HTTP_PROXY')
    if http_proxy_list_str:
        http_proxies = [p.strip() for p in http_proxy_list_str.split(',') if p.strip()]
        if http_proxies:
            print(f"Trovati {len(http_proxies)} proxy HTTP. Verranno usati a rotazione.")
            proxies_found.extend(http_proxies)

    https_proxy_list_str = os.environ.get('HTTPS_PROXY')
    if https_proxy_list_str:
        https_proxies = [p.strip() for p in https_proxy_list_str.split(',') if p.strip()]
        if https_proxies:
            print(f"Trovati {len(https_proxies)} proxy HTTPS. Verranno usati a rotazione.")
            proxies_found.extend(https_proxies)

    PROXY_LIST = proxies_found

    if PROXY_LIST:
        print(f"Totale di {len(PROXY_LIST)} proxy configurati. Verranno usati a rotazione per ogni richiesta.")
    else:
        print("Nessun proxy (SOCKS5, HTTP, HTTPS) configurato.")

def get_proxy_for_url(url):
    """Picks a random proxy from the list, but skips it for GitHub domains."""
    if not PROXY_LIST:
        return None

    try:
        parsed_url = urlparse(url)
        if 'github.com' in parsed_url.netloc:
            return None
    except Exception:
        pass

    chosen_proxy = random.choice(PROXY_LIST)
    return {'http': chosen_proxy, 'https': chosen_proxy}

def create_robust_session():
    """Creates a session with a robust configuration and keep-alive for persistent connections."""
    session = requests.Session()

    # Keep-Alive configuration
    session.headers.update({
        'Connection': 'keep-alive',
        'Keep-Alive': f'timeout={KEEP_ALIVE_TIMEOUT}, max={MAX_KEEP_ALIVE_REQUESTS}'
    })

    retry_strategy = Retry(
        total=3,
        read=2,
        connect=2,
        backoff_factor=1,
        status_forcelist=[429, 500, 502, 503, 504],
        allowed_methods=["HEAD", "GET", "OPTIONS"]
    )

    adapter = HTTPAdapter(
        max_retries=retry_strategy,
        pool_connections=POOL_CONNECTIONS,
        pool_maxsize=POOL_MAXSIZE,
        pool_block=False
    )

    session.mount("http://", adapter)
    session.mount("https://", adapter)

    return session

def get_persistent_session(proxy_url=None):
    """Gets a persistent session from the pool or creates a new one"""
    global SESSION_POOL, SESSION_LOCK

    # Use proxy_url as the pool key, or 'default' when there is no proxy
    pool_key = proxy_url if proxy_url else 'default'

    with SESSION_LOCK:
        if pool_key not in SESSION_POOL:
            session = create_robust_session()

            # Configure the proxy if one was supplied
            if proxy_url:
                session.proxies.update({'http': proxy_url, 'https': proxy_url})

            SESSION_POOL[pool_key] = session
            print(f"Nuova sessione persistente creata per: {pool_key}")

        return SESSION_POOL[pool_key]

def make_persistent_request(url, headers=None, timeout=None, proxy_url=None, **kwargs):
    """Performs a request using persistent connections"""
    session = get_persistent_session(proxy_url)

    # Keep-alive headers
    request_headers = {
        'Connection': 'keep-alive',
        'Keep-Alive': f'timeout={KEEP_ALIVE_TIMEOUT}, max={MAX_KEEP_ALIVE_REQUESTS}'
    }

    if headers:
        request_headers.update(headers)

    try:
        response = session.get(
            url,
            headers=request_headers,
            timeout=timeout or REQUEST_TIMEOUT,
            verify=VERIFY_SSL,
            **kwargs
        )
        return response
    except Exception as e:
        print(f"Errore nella richiesta persistente: {e}")
        # On error, drop the session from the pool
        with SESSION_LOCK:
            if proxy_url in SESSION_POOL:
                del SESSION_POOL[proxy_url]
        raise

def get_dynamic_timeout(url, base_timeout=REQUEST_TIMEOUT):
    """Computes a dynamic timeout based on the resource type."""
    if '.ts' in url.lower():
        return base_timeout * 2  # double timeout for TS segments
    elif '.m3u8' in url.lower():
        return base_timeout * 1.5  # increased timeout for playlists
    else:
        return base_timeout

setup_proxies()

# --- Cache configuration ---
M3U8_CACHE = TTLCache(maxsize=200, ttl=5)
TS_CACHE = TTLCache(maxsize=1000, ttl=300)
KEY_CACHE = TTLCache(maxsize=200, ttl=300)

# --- Dynamic DaddyLive URL Fetcher ---
DADDYLIVE_BASE_URL = None
LAST_FETCH_TIME = 0
FETCH_INTERVAL = 3600

def get_daddylive_base_url():
    """Fetches and caches the dynamic base URL for DaddyLive."""
    global DADDYLIVE_BASE_URL, LAST_FETCH_TIME
    current_time = time.time()

    if DADDYLIVE_BASE_URL and (current_time - LAST_FETCH_TIME < FETCH_INTERVAL):
        return DADDYLIVE_BASE_URL

    try:
        print("Fetching dynamic DaddyLive base URL from GitHub...")
        github_url = 'https://raw.githubusercontent.com/thecrewwh/dl_url/refs/heads/main/dl.xml'
        response = requests.get(
            github_url,
            timeout=REQUEST_TIMEOUT,
            proxies=get_proxy_for_url(github_url),
            verify=VERIFY_SSL
        )
        response.raise_for_status()
        content = response.text
        match = re.search(r'src\s*=\s*"([^"]*)"', content)
        if match:
            base_url = match.group(1)
            if not base_url.endswith('/'):
                base_url += '/'
            DADDYLIVE_BASE_URL = base_url
            LAST_FETCH_TIME = current_time
            print(f"Dynamic DaddyLive base URL updated to: {DADDYLIVE_BASE_URL}")
            return DADDYLIVE_BASE_URL
    except requests.RequestException as e:
        print(f"Error fetching dynamic DaddyLive URL: {e}. Using fallback.")

    DADDYLIVE_BASE_URL = "https://daddylive.sx/"
    print(f"Using fallback DaddyLive URL: {DADDYLIVE_BASE_URL}")
    return DADDYLIVE_BASE_URL

get_daddylive_base_url()

def detect_m3u_type(content):
    """Detects whether the content is an M3U (IPTV list) or an M3U8 (HLS stream)"""
    if "#EXTM3U" in content and "#EXTINF" in content:
        return "m3u8"
    return "m3u"

def replace_key_uri(line, headers_query):
    """Replaces the AES-128 key URI with the proxied one"""
    match = re.search(r'URI="([^"]+)"', line)
    if match:
        key_url = match.group(1)
        proxied_key_url = f"/proxy/key?url={quote(key_url)}&{headers_query}"
        return line.replace(key_url, proxied_key_url)
    return line

def extract_channel_id(url):
    """Extracts the channel ID from various URL formats"""
    match_premium = re.search(r'/premium(\d+)/mono\.m3u8$', url)
    if match_premium:
        return match_premium.group(1)

    match_player = re.search(r'/(?:watch|stream|cast|player)/stream-(\d+)\.php', url)
    if match_player:
        return match_player.group(1)

    return None

def process_daddylive_url(url):
    """Converts legacy URLs into formats compatible with DaddyLive 2025"""
    daddy_base_url = get_daddylive_base_url()
    daddy_domain = urlparse(daddy_base_url).netloc

    match_premium = re.search(r'/premium(\d+)/mono\.m3u8$', url)
    if match_premium:
        channel_id = match_premium.group(1)
        new_url = f"{daddy_base_url}watch/stream-{channel_id}.php"
        print(f"URL processato da {url} a {new_url}")
        return new_url

    if daddy_domain in url and any(p in url for p in ['/watch/', '/stream/', '/cast/', '/player/']):
        return url

    if url.isdigit():
        return f"{daddy_base_url}watch/stream-{url}.php"

    return url

def resolve_m3u8_link(url, headers=None):
    """Resolves DaddyLive URLs with advanced handling of timeout errors."""
    if not url:
        print("Errore: URL non fornito.")
        return {"resolved_url": None, "headers": {}}

    current_headers = headers.copy() if headers else {}

    clean_url = url
    extracted_headers = {}
    if '&h_' in url or '%26h_' in url:
        print("Rilevati parametri header nell'URL - Estrazione in corso...")
        temp_url = url
        if 'vavoo.to' in temp_url.lower() and '%26' in temp_url:
            temp_url = temp_url.replace('%26', '&')

        if '%26h_' in temp_url:
            temp_url = unquote(unquote(temp_url))

        url_parts = temp_url.split('&h_', 1)
        clean_url = url_parts[0]
        header_params = '&h_' + url_parts[1]

        for param in header_params.split('&'):
            if param.startswith('h_'):
                try:
                    key_value = param[2:].split('=', 1)
                    if len(key_value) == 2:
                        key = unquote(key_value[0]).replace('_', '-')
                        value = unquote(key_value[1])
                        extracted_headers[key] = value
                except Exception as e:
                    print(f"Errore nell'estrazione dell'header {param}: {e}")

    print(f"Tentativo di risoluzione URL (DaddyLive): {clean_url}")

    daddy_base_url = get_daddylive_base_url()
    daddy_origin = urlparse(daddy_base_url).scheme + "://" + urlparse(daddy_base_url).netloc

    daddylive_headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
        'Referer': daddy_base_url,
        'Origin': daddy_origin
    }
    final_headers_for_resolving = {**current_headers, **daddylive_headers}

    try:
        print("Ottengo URL base dinamico...")
        github_url = 'https://raw.githubusercontent.com/thecrewwh/dl_url/refs/heads/main/dl.xml'
        main_url_req = requests.get(
            github_url,
            timeout=REQUEST_TIMEOUT,
            proxies=get_proxy_for_url(github_url),
            verify=VERIFY_SSL
        )
        main_url_req.raise_for_status()
        main_url = main_url_req.text
        baseurl = re.findall('(?s)src = "([^"]*)', main_url)[0]
        print(f"URL base ottenuto: {baseurl}")

        channel_id = extract_channel_id(clean_url)
        if not channel_id:
            print(f"Impossibile estrarre ID canale da {clean_url}")
            return {"resolved_url": clean_url, "headers": current_headers}

        print(f"ID canale estratto: {channel_id}")

        stream_url = f"{baseurl}stream/stream-{channel_id}.php"
        print(f"URL stream costruito: {stream_url}")

        final_headers_for_resolving['Referer'] = baseurl + '/'
        final_headers_for_resolving['Origin'] = baseurl

        print(f"Passo 1: Richiesta a {stream_url}")
        response = requests.get(stream_url, headers=final_headers_for_resolving, timeout=REQUEST_TIMEOUT, proxies=get_proxy_for_url(stream_url), verify=VERIFY_SSL)
        response.raise_for_status()

        iframes = re.findall(r'<a[^>]*href="([^"]+)"[^>]*>\s*<button[^>]*>\s*Player\s*2\s*<\/button>', response.text)
        if not iframes:
            print("Nessun link Player 2 trovato")
            return {"resolved_url": clean_url, "headers": current_headers}

        print(f"Passo 2: Trovato link Player 2: {iframes[0]}")

        url2 = iframes[0]
        url2 = baseurl + url2
        url2 = url2.replace('//cast', '/cast')

        final_headers_for_resolving['Referer'] = url2
        final_headers_for_resolving['Origin'] = url2

        print(f"Passo 3: Richiesta a Player 2: {url2}")
        response = requests.get(url2, headers=final_headers_for_resolving, timeout=REQUEST_TIMEOUT, proxies=get_proxy_for_url(url2), verify=VERIFY_SSL)
        response.raise_for_status()

        iframes = re.findall(r'iframe src="([^"]*)', response.text)
        if not iframes:
            print("Nessun iframe trovato nella pagina Player 2")
            return {"resolved_url": clean_url, "headers": current_headers}

        iframe_url = iframes[0]
        print(f"Passo 4: Trovato iframe: {iframe_url}")

        print(f"Passo 5: Richiesta iframe: {iframe_url}")
        response = requests.get(iframe_url, headers=final_headers_for_resolving, timeout=REQUEST_TIMEOUT, proxies=get_proxy_for_url(iframe_url), verify=VERIFY_SSL)
        response.raise_for_status()

        iframe_content = response.text

        try:
            channel_key = re.findall(r'(?s) channelKey = \"([^"]*)', iframe_content)[0]

            auth_ts_b64 = re.findall(r'(?s)c = atob\("([^"]*)', iframe_content)[0]
            auth_ts = base64.b64decode(auth_ts_b64).decode('utf-8')

            auth_rnd_b64 = re.findall(r'(?s)d = atob\("([^"]*)', iframe_content)[0]
            auth_rnd = base64.b64decode(auth_rnd_b64).decode('utf-8')

            auth_sig_b64 = re.findall(r'(?s)e = atob\("([^"]*)', iframe_content)[0]
            auth_sig = base64.b64decode(auth_sig_b64).decode('utf-8')
            auth_sig = quote_plus(auth_sig)

            auth_host_b64 = re.findall(r'(?s)a = atob\("([^"]*)', iframe_content)[0]
            auth_host = base64.b64decode(auth_host_b64).decode('utf-8')

            auth_php_b64 = re.findall(r'(?s)b = atob\("([^"]*)', iframe_content)[0]
            auth_php = base64.b64decode(auth_php_b64).decode('utf-8')

            print(f"Parametri estratti: channel_key={channel_key}")

        except (IndexError, Exception) as e:
            print(f"Errore estrazione parametri: {e}")
            return {"resolved_url": clean_url, "headers": current_headers}

        auth_url = f'{auth_host}{auth_php}?channel_id={channel_key}&ts={auth_ts}&rnd={auth_rnd}&sig={auth_sig}'
        print(f"Passo 6: Autenticazione: {auth_url}")

        auth_response = requests.get(auth_url, headers=final_headers_for_resolving, timeout=REQUEST_TIMEOUT, proxies=get_proxy_for_url(auth_url), verify=VERIFY_SSL)
        auth_response.raise_for_status()

        host = re.findall('(?s)m3u8 =.*?:.*?:.*?".*?".*?"([^"]*)', iframe_content)[0]
        server_lookup = re.findall(r'n fetchWithRetry\(\s*\'([^\']*)', iframe_content)[0]

        server_lookup_url = f"https://{urlparse(iframe_url).netloc}{server_lookup}{channel_key}"
        print(f"Passo 7: Server lookup: {server_lookup_url}")

        lookup_response = requests.get(server_lookup_url, headers=final_headers_for_resolving, timeout=REQUEST_TIMEOUT, proxies=get_proxy_for_url(server_lookup_url), verify=VERIFY_SSL)
        lookup_response.raise_for_status()
        server_data = lookup_response.json()
        server_key = server_data['server_key']

        print(f"Server key ottenuto: {server_key}")

        referer_raw = f'https://{urlparse(iframe_url).netloc}'

        clean_m3u8_url = f'https://{server_key}{host}{server_key}/{channel_key}/mono.m3u8'

        print(f"URL M3U8 pulito costruito: {clean_m3u8_url}")

        final_headers_for_fetch = {
            'User-Agent': final_headers_for_resolving.get('User-Agent'),
            'Referer': referer_raw,
            'Origin': referer_raw
        }

        return {
            "resolved_url": clean_m3u8_url,
            "headers": final_headers_for_fetch
        }

    except (requests.exceptions.ConnectTimeout, requests.exceptions.ProxyError) as e:
        print(f"ERRORE DI TIMEOUT O PROXY DURANTE LA RISOLUZIONE: {e}")
        print("Questo problema è spesso legato a un proxy SOCKS5 lento, non funzionante o bloccato.")
        print("CONSIGLI: Controlla che i tuoi proxy siano attivi. Prova ad aumentare il timeout impostando la variabile d'ambiente 'REQUEST_TIMEOUT' (es. a 20 o 30 secondi).")
        return {"resolved_url": clean_url, "headers": current_headers}
    except requests.exceptions.ConnectionError as e:
        if "Read timed out" in str(e):
            print(f"Read timeout durante la risoluzione per {clean_url}")
            return {"resolved_url": clean_url, "headers": current_headers}
        else:
            print(f"Errore di connessione durante la risoluzione: {e}")
            return {"resolved_url": clean_url, "headers": current_headers}
    except requests.exceptions.ReadTimeout as e:
        print(f"Read timeout esplicito per {clean_url}")
        return {"resolved_url": clean_url, "headers": current_headers}
    except Exception as e:
        print(f"Errore durante la risoluzione: {e}")
        import traceback
        print(f"Traceback: {traceback.format_exc()}")
        return {"resolved_url": clean_url, "headers": current_headers}

@app.route('/stats')
def get_stats():
    """Endpoint that returns the system statistics"""
    stats = get_system_stats()
    return jsonify(stats)

@app.route('/dashboard')
def dashboard():
    """Dashboard with system statistics"""
    stats = get_system_stats()
    daddy_base_url = get_daddylive_base_url()

    dashboard_html = f"""
    <!DOCTYPE html>
    <html>
    <head>
        <title>Proxy Dashboard</title>
        <meta http-equiv="refresh" content="5">
        <style>
            body {{ font-family: Arial, sans-serif; margin: 20px; background-color: #f5f5f5; }}
            .container {{ max-width: 1200px; margin: 0 auto; }}
            .stats-grid {{ display: grid; grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); gap: 20px; margin: 20px 0; }}
            .stat-card {{ background: white; padding: 20px; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1); }}
            .stat-title {{ font-size: 18px; font-weight: bold; color: #333; margin-bottom: 10px; }}
            .stat-value {{ font-size: 24px; color: #007bff; }}
            .status {{ padding: 10px; background: #d4edda; border: 1px solid #c3e6cb; border-radius: 4px; margin: 20px 0; }}
            .progress-bar {{ width: 100%; height: 20px; background-color: #e9ecef; border-radius: 10px; overflow: hidden; }}
            .progress-fill {{ height: 100%; background-color: #007bff; transition: width 0.3s ease; }}
            .connection-stats {{ background: #f8f9fa; padding: 15px; border-radius: 5px; margin: 10px 0; }}
        </style>
    </head>
    <body>
        <div class="container">
            <h1>🚀 Proxy Monitoring Dashboard</h1>

            <div class="status">
                <strong>Status:</strong> Proxy ONLINE - Base URL: {daddy_base_url}
            </div>

            <div class="connection-stats">
                <strong>Connessioni Persistenti:</strong> {len(SESSION_POOL)} sessioni attive nel pool
            </div>

            <div class="stats-grid">
                <div class="stat-card">
                    <div class="stat-title">💾 Utilizzo RAM</div>
                    <div class="stat-value">{stats['ram_usage']:.1f}%</div>
                    <div class="progress-bar">
                        <div class="progress-fill" style="width: {stats['ram_usage']}%"></div>
                    </div>
                    <small>{stats['ram_used_gb']:.2f} GB / {stats['ram_total_gb']:.2f} GB</small>
                </div>

                <div class="stat-card">
                    <div class="stat-title">🌐 Banda di Rete</div>
                    <div class="stat-value">{stats['bandwidth_usage']:.2f} MB/s</div>
                    <small>Utilizzo corrente della banda</small>
                </div>

                <div class="stat-card">
                    <div class="stat-title">📤 Dati Inviati</div>
                    <div class="stat-value">{stats['network_sent']:.1f} MB</div>
                    <small>Totale dalla partenza</small>
                </div>

                <div class="stat-card">
                    <div class="stat-title">📥 Dati Ricevuti</div>
                    <div class="stat-value">{stats['network_recv']:.1f} MB</div>
                    <small>Totale dalla partenza</small>
                </div>
            </div>

            <div style="margin-top: 30px;">
                <h3>🔗 Endpoints Disponibili:</h3>
                <ul>
                    <li><a href="/proxy?url=URL_M3U">/proxy</a> - Proxy per liste M3U</li>
                    <li><a href="/proxy/m3u?url=URL_M3U8">/proxy/m3u</a> - Proxy per file M3U8</li>
                    <li><a href="/proxy/resolve?url=URL">/proxy/resolve</a> - Risoluzione URL DaddyLive</li>
                    <li><a href="/stats">/stats</a> - API JSON delle statistiche</li>
                </ul>
            </div>
        </div>
    </body>
    </html>
    """

    return dashboard_html

@app.route('/proxy/m3u')
|
| 662 |
+
def proxy_m3u():
|
| 663 |
+
"""Proxy per file M3U e M3U8 con supporto DaddyLive 2025 e caching"""
|
| 664 |
+
m3u_url = request.args.get('url', '').strip()
|
| 665 |
+
if not m3u_url:
|
| 666 |
+
return "Errore: Parametro 'url' mancante", 400
|
| 667 |
+
|
| 668 |
+
cache_key_headers = "&".join(sorted([f"{k}={v}" for k, v in request.args.items() if k.lower().startswith("h_")]))
|
| 669 |
+
cache_key = f"{m3u_url}|{cache_key_headers}"
|
| 670 |
+
|
| 671 |
+
if cache_key in M3U8_CACHE:
|
| 672 |
+
print(f"Cache HIT per M3U8: {m3u_url}")
|
| 673 |
+
cached_response = M3U8_CACHE[cache_key]
|
| 674 |
+
return Response(cached_response, content_type="application/vnd.apple.mpegurl")
|
| 675 |
+
print(f"Cache MISS per M3U8: {m3u_url}")
|
| 676 |
+
|
| 677 |
+
daddy_base_url = get_daddylive_base_url()
|
| 678 |
+
daddy_origin = urlparse(daddy_base_url).scheme + "://" + urlparse(daddy_base_url).netloc
|
| 679 |
+
|
| 680 |
+
default_headers = {
|
| 681 |
+
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36",
|
| 682 |
+
"Referer": daddy_base_url,
|
| 683 |
+
"Origin": daddy_origin
|
| 684 |
+
}
|
| 685 |
+
|
| 686 |
+
request_headers = {
|
| 687 |
+
unquote(key[2:]).replace("_", "-"): unquote(value).strip()
|
| 688 |
+
for key, value in request.args.items()
|
| 689 |
+
if key.lower().startswith("h_")
|
| 690 |
+
}
|
| 691 |
+
|
| 692 |
+
headers = {**default_headers, **request_headers}
|
| 693 |
+
|
| 694 |
+
processed_url = process_daddylive_url(m3u_url)
|
| 695 |
+
|
| 696 |
+
try:
|
| 697 |
+
print(f"Chiamata a resolve_m3u8_link per URL processato: {processed_url}")
|
| 698 |
+
result = resolve_m3u8_link(processed_url, headers)
|
| 699 |
+
if not result["resolved_url"]:
|
| 700 |
+
return "Errore: Impossibile risolvere l'URL in un M3U8 valido.", 500
|
| 701 |
+
|
| 702 |
+
resolved_url = result["resolved_url"]
|
| 703 |
+
current_headers_for_proxy = result["headers"]
|
| 704 |
+
|
| 705 |
+
print(f"Risoluzione completata. URL M3U8 finale: {resolved_url}")
|
| 706 |
+
|
| 707 |
+
if not resolved_url.endswith('.m3u8'):
|
| 708 |
+
print(f"URL risolto non è un M3U8: {resolved_url}")
|
| 709 |
+
return "Errore: Impossibile ottenere un M3U8 valido dal canale", 500
|
| 710 |
+
|
| 711 |
+
print(f"Fetching M3U8 content from clean URL: {resolved_url}")
|
| 712 |
+
print(f"Using headers: {current_headers_for_proxy}")
|
| 713 |
+
|
| 714 |
+
timeout = get_dynamic_timeout(resolved_url)
|
| 715 |
+
proxy_config = get_proxy_for_url(resolved_url)
|
| 716 |
+
proxy_key = proxy_config['http'] if proxy_config else None
|
| 717 |
+
|
| 718 |
+
m3u_response = make_persistent_request(
|
| 719 |
+
resolved_url,
|
| 720 |
+
headers=current_headers_for_proxy,
|
| 721 |
+
timeout=timeout,
|
| 722 |
+
proxy_url=proxy_key,
|
| 723 |
+
allow_redirects=True
|
| 724 |
+
)
|
| 725 |
+
m3u_response.raise_for_status()
|
| 726 |
+
|
| 727 |
+
m3u_content = m3u_response.text
|
| 728 |
+
final_url = m3u_response.url
|
| 729 |
+
|
| 730 |
+
file_type = detect_m3u_type(m3u_content)
|
| 731 |
+
if file_type == "m3u":
|
| 732 |
+
return Response(m3u_content, content_type="application/vnd.apple.mpegurl")
|
| 733 |
+
|
| 734 |
+
parsed_url = urlparse(final_url)
|
| 735 |
+
base_url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path.rsplit('/', 1)[0]}/"
|
| 736 |
+
|
| 737 |
+
headers_query = "&".join([f"h_{quote(k)}={quote(v)}" for k, v in current_headers_for_proxy.items()])
|
| 738 |
+
|
| 739 |
+
modified_m3u8 = []
|
| 740 |
+
for line in m3u_content.splitlines():
|
| 741 |
+
line = line.strip()
|
| 742 |
+
if line.startswith("#EXT-X-KEY") and 'URI="' in line:
|
| 743 |
+
line = replace_key_uri(line, headers_query)
|
| 744 |
+
elif line and not line.startswith("#"):
|
| 745 |
+
segment_url = urljoin(base_url, line)
|
| 746 |
+
line = f"/proxy/ts?url={quote(segment_url)}&{headers_query}"
|
| 747 |
+
modified_m3u8.append(line)
|
| 748 |
+
|
| 749 |
+
modified_m3u8_content = "\n".join(modified_m3u8)
|
| 750 |
+
|
| 751 |
+
M3U8_CACHE[cache_key] = modified_m3u8_content
|
| 752 |
+
|
| 753 |
+
return Response(modified_m3u8_content, content_type="application/vnd.apple.mpegurl")
|
| 754 |
+
|
| 755 |
+
except requests.RequestException as e:
|
| 756 |
+
print(f"Errore durante il download o la risoluzione del file: {str(e)}")
|
| 757 |
+
return f"Errore durante il download o la risoluzione del file M3U/M3U8: {str(e)}", 500
|
| 758 |
+
except Exception as e:
|
| 759 |
+
print(f"Errore generico nella funzione proxy_m3u: {str(e)}")
|
| 760 |
+
return f"Errore generico durante l'elaborazione: {str(e)}", 500
|
| 761 |
+
|
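For illustration, a client asks this route for a proxied playlist by passing the upstream URL (and any per-stream headers as `h_` parameters). A minimal sketch, assuming a local deployment on port 7860; the host and upstream URL below are placeholders, not values from the repository:

# Hypothetical usage sketch for /proxy/m3u (host, port and upstream URL are made-up examples).
# Extra h_<name> query parameters are decoded and forwarded as HTTP headers by proxy_m3u.
from urllib.parse import quote

PROXY = "http://127.0.0.1:7860"                # assumed local deployment
upstream = "https://example.com/stream.m3u8"   # placeholder playlist URL

url = (
    f"{PROXY}/proxy/m3u?url={quote(upstream, safe='')}"
    f"&h_user_agent={quote('VLC/3.0.20')}"     # h_user_agent -> User-Agent header upstream
)
print(url)  # open this URL in a player; media lines are rewritten to /proxy/ts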
@app.route('/proxy/resolve')
def proxy_resolve():
    """Proxy per risolvere e restituire un URL M3U8 con metodo DaddyLive 2025"""
    url = request.args.get('url', '').strip()
    if not url:
        return "Errore: Parametro 'url' mancante", 400

    daddy_base_url = get_daddylive_base_url()
    daddy_origin = urlparse(daddy_base_url).scheme + "://" + urlparse(daddy_base_url).netloc

    default_headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36",
        "Referer": daddy_base_url,
        "Origin": daddy_origin
    }

    request_headers = {
        unquote(key[2:]).replace("_", "-"): unquote(value).strip()
        for key, value in request.args.items()
        if key.lower().startswith("h_")
    }

    headers = {**default_headers, **request_headers}

    try:
        processed_url = process_daddylive_url(url)
        result = resolve_m3u8_link(processed_url, headers)
        if not result["resolved_url"]:
            return "Errore: Impossibile risolvere l'URL", 500

        headers_query = "&".join([f"h_{quote(k)}={quote(v)}" for k, v in result["headers"].items()])
        return Response(
            f"#EXTM3U\n"
            f"#EXTINF:-1,Canale Risolto\n"
            f"/proxy/m3u?url={quote(result['resolved_url'])}&{headers_query}",
            content_type="application/vnd.apple.mpegurl"
        )

    except Exception as e:
        return f"Errore durante la risoluzione dell'URL: {str(e)}", 500

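A quick way to see what this endpoint produces is to request it directly: it returns a one-entry M3U whose single media line points back at /proxy/m3u. A minimal sketch, assuming the server is running locally on port 7860 and using a placeholder channel URL:

# Hypothetical check of /proxy/resolve (host and channel URL are placeholders).
import requests

r = requests.get(
    "http://127.0.0.1:7860/proxy/resolve",
    params={"url": "https://example.com/watch/stream-123.php"},  # placeholder channel page
    timeout=30,
)
print(r.status_code)
print(r.text)  # expected shape: "#EXTM3U", "#EXTINF:-1,Canale Risolto", "/proxy/m3u?url=..."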
@app.route('/proxy/ts')
def proxy_ts():
    """Proxy per segmenti .TS con connessioni persistenti, headers personalizzati e caching"""
    ts_url = request.args.get('url', '').strip()
    if not ts_url:
        return "Errore: Parametro 'url' mancante", 400

    if ts_url in TS_CACHE:
        print(f"Cache HIT per TS: {ts_url}")
        return Response(TS_CACHE[ts_url], content_type="video/mp2t")
    print(f"Cache MISS per TS: {ts_url}")

    headers = {
        unquote(key[2:]).replace("_", "-"): unquote(value).strip()
        for key, value in request.args.items()
        if key.lower().startswith("h_")
    }

    proxy_config = get_proxy_for_url(ts_url)
    proxy_key = proxy_config['http'] if proxy_config else None

    ts_timeout = get_dynamic_timeout(ts_url)
    max_retries = 3

    for attempt in range(max_retries):
        try:
            response = make_persistent_request(
                ts_url,
                headers=headers,
                timeout=ts_timeout,
                proxy_url=proxy_key,
                stream=True,
                allow_redirects=True
            )
            response.raise_for_status()

            def generate_and_cache():
                content_parts = []
                try:
                    for chunk in response.iter_content(chunk_size=8192):
                        if chunk:
                            content_parts.append(chunk)
                            yield chunk
                except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout) as e:
                    if "Read timed out" in str(e) or "timed out" in str(e).lower():
                        print(f"Timeout durante il download del segmento TS (tentativo {attempt + 1}): {ts_url}")
                        return
                    raise
                finally:
                    ts_content = b"".join(content_parts)
                    if ts_content and len(ts_content) > 1024:
                        TS_CACHE[ts_url] = ts_content
                        print(f"Segmento TS cachato ({len(ts_content)} bytes) per: {ts_url}")

            return Response(generate_and_cache(), content_type="video/mp2t")

        except requests.exceptions.ConnectionError as e:
            if "Read timed out" in str(e) or "timed out" in str(e).lower():
                print(f"Timeout del segmento TS (tentativo {attempt + 1}/{max_retries}): {ts_url}")
                if attempt == max_retries - 1:
                    return f"Errore: Timeout persistente per il segmento TS dopo {max_retries} tentativi", 504
                time.sleep(2 ** attempt)
                continue
            else:
                return f"Errore di connessione per il segmento TS: {str(e)}", 500
        except requests.exceptions.ReadTimeout as e:
            print(f"Read timeout esplicito per il segmento TS (tentativo {attempt + 1}/{max_retries}): {ts_url}")
            if attempt == max_retries - 1:
                return f"Errore: Read timeout persistente per il segmento TS dopo {max_retries} tentativi", 504
            time.sleep(2 ** attempt)
            continue
        except requests.RequestException as e:
            return f"Errore durante il download del segmento TS: {str(e)}", 500

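A side note on the retry loop above: on a timeout the route sleeps 2**attempt seconds before retrying, so with max_retries = 3 the waits are 1 s and 2 s before the final attempt returns a 504. A minimal sketch of that schedule, derived from the loop above:

# Backoff schedule implied by proxy_ts (timeout path only); purely illustrative.
max_retries = 3
for attempt in range(max_retries):
    if attempt == max_retries - 1:
        print(f"attempt {attempt + 1} failed -> give up with HTTP 504")
    else:
        print(f"attempt {attempt + 1} failed -> sleep {2 ** attempt}s, then retry")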
@app.route('/proxy')
def proxy():
    """Proxy per liste M3U che aggiunge automaticamente /proxy/m3u?url= con IP prima dei link"""
    m3u_url = request.args.get('url', '').strip()
    if not m3u_url:
        return "Errore: Parametro 'url' mancante", 400

    try:
        server_ip = request.host
        proxy_config = get_proxy_for_url(m3u_url)
        proxy_key = proxy_config['http'] if proxy_config else None

        response = make_persistent_request(
            m3u_url,
            timeout=REQUEST_TIMEOUT,
            proxy_url=proxy_key
        )
        response.raise_for_status()
        m3u_content = response.text

        modified_lines = []
        current_stream_headers_params = []

        for line in m3u_content.splitlines():
            line = line.strip()
            if line.startswith('#EXTHTTP:'):
                try:
                    json_str = line.split(':', 1)[1].strip()
                    headers_dict = json.loads(json_str)
                    for key, value in headers_dict.items():
                        encoded_key = quote(quote(key))
                        encoded_value = quote(quote(str(value)))
                        current_stream_headers_params.append(f"h_{encoded_key}={encoded_value}")
                except Exception as e:
                    print(f"ERROR: Errore nel parsing di #EXTHTTP '{line}': {e}")
                modified_lines.append(line)

            elif line.startswith('#EXTVLCOPT:'):
                try:
                    options_str = line.split(':', 1)[1].strip()
                    for opt_pair in options_str.split(','):
                        opt_pair = opt_pair.strip()
                        if '=' in opt_pair:
                            key, value = opt_pair.split('=', 1)
                            key = key.strip()
                            value = value.strip().strip('"')

                            header_key = None
                            if key.lower() == 'http-user-agent':
                                header_key = 'User-Agent'
                            elif key.lower() == 'http-referer':
                                header_key = 'Referer'
                            elif key.lower() == 'http-cookie':
                                header_key = 'Cookie'
                            elif key.lower() == 'http-header':
                                full_header_value = value
                                if ':' in full_header_value:
                                    header_name, header_val = full_header_value.split(':', 1)
                                    header_key = header_name.strip()
                                    value = header_val.strip()
                                else:
                                    print(f"WARNING: Malformed http-header option in EXTVLCOPT: {opt_pair}")
                                    continue

                            if header_key:
                                encoded_key = quote(quote(header_key))
                                encoded_value = quote(quote(value))
                                current_stream_headers_params.append(f"h_{encoded_key}={encoded_value}")

                except Exception as e:
                    print(f"ERROR: Errore nel parsing di #EXTVLCOPT '{line}': {e}")
                modified_lines.append(line)
            elif line and not line.startswith('#'):
                if 'pluto.tv' in line.lower():
                    modified_lines.append(line)
                else:
                    encoded_line = quote(line, safe='')
                    headers_query_string = ""
                    if current_stream_headers_params:
                        headers_query_string = "%26" + "%26".join(current_stream_headers_params)

                    modified_line = f"http://{server_ip}/proxy/m3u?url={encoded_line}{headers_query_string}"
                    modified_lines.append(modified_line)

                current_stream_headers_params = []
            else:
                modified_lines.append(line)

        modified_content = '\n'.join(modified_lines)
        parsed_m3u_url = urlparse(m3u_url)
        original_filename = os.path.basename(parsed_m3u_url.path)

        return Response(modified_content, content_type="application/vnd.apple.mpegurl", headers={'Content-Disposition': f'attachment; filename="{original_filename}"'})

    except requests.RequestException as e:
        print(f"ERRORE: Fallito il download di '{m3u_url}'.")
        return f"Errore durante il download della lista M3U: {str(e)}", 500
    except Exception as e:
        return f"Errore generico: {str(e)}", 500

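For clarity on the list rewriter above: per-stream headers collected from #EXTHTTP / #EXTVLCOPT tags are double URL-encoded and appended to the rewritten link as %26h_...=... parameters. A minimal sketch of that encoding step, using an example header value rather than anything from the repository:

# Illustration of the quote(quote(...)) double-encoding used by the /proxy rewriter.
from urllib.parse import quote

header_key, value = "User-Agent", "VLC/3.0.20 LibVLC/3.0.20"   # example header
param = f"h_{quote(quote(header_key))}={quote(quote(value))}"
print(param)
# A rewritten playlist line then has the shape:
# http://<server_ip>/proxy/m3u?url=<encoded stream URL>%26h_User-Agent=<double-encoded value>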
@app.route('/proxy/key')
def proxy_key():
    """Proxy per la chiave AES-128 con headers personalizzati e caching"""
    key_url = request.args.get('url', '').strip()
    if not key_url:
        return "Errore: Parametro 'url' mancante per la chiave", 400

    if key_url in KEY_CACHE:
        print(f"Cache HIT per KEY: {key_url}")
        return Response(KEY_CACHE[key_url], content_type="application/octet-stream")
    print(f"Cache MISS per KEY: {key_url}")

    headers = {
        unquote(key[2:]).replace("_", "-"): unquote(value).strip()
        for key, value in request.args.items()
        if key.lower().startswith("h_")
    }

    try:
        proxy_config = get_proxy_for_url(key_url)
        proxy_key = proxy_config['http'] if proxy_config else None

        response = make_persistent_request(
            key_url,
            headers=headers,
            timeout=REQUEST_TIMEOUT,
            proxy_url=proxy_key,
            allow_redirects=True
        )
        response.raise_for_status()
        key_content = response.content

        KEY_CACHE[key_url] = key_content
        return Response(key_content, content_type="application/octet-stream")

    except requests.RequestException as e:
        return f"Errore durante il download della chiave AES-128: {str(e)}", 500

@app.route('/')
def index():
    """Pagina principale con statistiche di sistema"""
    stats = get_system_stats()
    base_url = get_daddylive_base_url()

    return f"""
    <h1>🚀 Proxy ONLINE</h1>
    <p><strong>Base URL DaddyLive:</strong> {base_url}</p>

    <h2>📊 Statistiche Sistema</h2>
    <ul>
        <li><strong>RAM:</strong> {stats['ram_usage']:.1f}% ({stats['ram_used_gb']:.2f} GB / {stats['ram_total_gb']:.2f} GB)</li>
        <li><strong>Banda:</strong> {stats['bandwidth_usage']:.2f} MB/s</li>
        <li><strong>Dati Inviati:</strong> {stats['network_sent']:.1f} MB</li>
        <li><strong>Dati Ricevuti:</strong> {stats['network_recv']:.1f} MB</li>
        <li><strong>Connessioni Persistenti:</strong> {len(SESSION_POOL)} sessioni attive</li>
    </ul>

    <p><a href="/dashboard">📈 Dashboard Completo</a> | <a href="/stats">📊 API JSON</a></p>
    """

if __name__ == '__main__':
    port = int(os.environ.get("PORT", 7860))
    print(f"Proxy ONLINE - In ascolto su porta {port}")
    app.run(host="0.0.0.0", port=port, debug=False)
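The __main__ block above starts Flask's built-in server and reads the listening port from the PORT environment variable, defaulting to 7860. A minimal local-run sketch, assuming app.py and its dependencies are installed; the port override is just an example:

# Hypothetical local run of app.py with a custom port (values are examples).
import os
import subprocess

env = dict(os.environ, PORT="8080")          # override the default 7860
subprocess.run(["python", "app.py"], env=env, check=True)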
tvproxy-main/requirements.txt
ADDED
|
@@ -0,0 +1,7 @@
flask
requests
gunicorn[gevent]
python-dotenv
cachetools
PySocks
psutil