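"""FastAPI service that periodically collects public proxies with ProxyBroker
and serves the collected list as JSON on /proxies/."""
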
from asyncio import Queue, create_task
from contextlib import asynccontextmanager
from json import dumps, loads
from pathlib import Path

from apscheduler.schedulers.asyncio import AsyncIOScheduler
from fastapi import FastAPI, HTTPException
from fastapi.responses import JSONResponse, PlainTextResponse
from proxybroker import Broker
from uvicorn import run as uvicorn_run

scheduler = AsyncIOScheduler()
try:
    workdir = Path(__file__).parent
except NameError:  # __file__ is undefined in some embedded/interactive contexts
    workdir = Path.cwd().parent

collected_json = workdir / 'proxies.json'
countries_list = [
    'US', 'CA', 'FR', 'FI', 'HR', 'ME', 'CH', 'SE', 'EE', 'DE', 'GB',
    'IT', 'NL', 'PL', 'CZ', 'RS', 'RO', 'MD', 'AT', 'BE', 'BG', 'HU',
    'DK', 'IS', 'KZ', 'LV', 'LT', 'LU', 'NO', 'PT', 'SK', 'SI',
]


def create_json_from_proxies(proxy_lines, filename):
    """Parse ProxyBroker proxy strings and write them to a JSON file."""
    countries = set()
    proxies = []

    # Expected entry format: "<Proxy US 0.32s [HTTPS] 1.2.3.4:8080>" or, for HTTP
    # with an anonymity level, "<Proxy US 0.32s [HTTP: High] 1.2.3.4:8080>".
    for line in proxy_lines:
        parts = line.split()
        country = parts[1]
        ping = float(parts[2].strip('s'))
        protocol = parts[3].strip('[]')
        host = parts[4].rstrip('>')

        # "[HTTP: High]" splits into two tokens, so the host shifts one token to the right.
        if "HTTP:" in protocol:
            protocol = "HTTP"
            host = parts[5].rstrip(']>')

        countries.add(country)
        proxies.append({"country": country, "ping": ping, "protocol": protocol, "host": host})

    data = {
        'countries': sorted(countries),
        'proxies': proxies
    }
    filename.write_text(dumps(data, indent=4))
    return filename


async def collect_proxies(proxies_queue):
    """Drain the queue until ProxyBroker signals completion with a None sentinel."""
    proxies_list = []
    while True:
        proxy = await proxies_queue.get()
        if proxy is None:  # the Broker puts None on the queue once the search finishes
            break
        proxies_list.append(f'{proxy}')
        print(f"\rtotal proxies: {len(proxies_list)}", end='')
    return proxies_list


async def find_proxies():
    """Run a ProxyBroker search and refresh the JSON file with the results."""
    # Reset the file first so /proxies/ serves a valid (empty) payload while the search runs.
    collected_json.write_text(dumps({'countries': None, 'proxies': []}, indent=4))
    proxies_queue = Queue()
    broker = Broker(proxies_queue, timeout=5, max_conn=200, max_tries=3, verify_ssl=True)
    await broker.find(types=['HTTP', 'HTTPS', 'SOCKS5'], countries=countries_list, limit=150)
    proxies_list = await collect_proxies(proxies_queue)
    return create_json_from_proxies(proxies_list, collected_json)


# Refresh the proxy list every hour; max_instances=1 skips a run if the previous one is still going.
scheduler.add_job(find_proxies, 'interval', max_instances=1, minutes=60)


@asynccontextmanager
async def app_lifespan(app: FastAPI):
    # Start the hourly scheduler and kick off an immediate first collection on startup.
    scheduler.start()
    task = create_task(find_proxies())
    yield
    # On shutdown, wait for the in-flight collection to finish, then stop the scheduler.
    await task
    scheduler.shutdown()


app = FastAPI(lifespan=app_lifespan)


@app.post('/{full_path:path}')
async def reject_post(full_path: str):
    # POST is not supported anywhere on this service.
    raise HTTPException(status_code=405)


@app.get('/proxies/')
async def get_proxies():
    if collected_json.exists():
        return loads(collected_json.read_text())
    else:
        # 204 must not carry a body, so report "not ready" as 503 Service Unavailable instead.
        return JSONResponse({"error": "Proxy list is not ready yet."}, status_code=503)


@app.get('/')
async def read_root():
    return PlainTextResponse('ну пролапс, ну и что', status_code=200)


if __name__ == "__main__":
    uvicorn_run(app, host='0.0.0.0', port=7860, timeout_keep_alive=90)
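
# Example requests (assuming the service is running locally on port 7860):
#   curl http://localhost:7860/           -> plain-text liveness message
#   curl http://localhost:7860/proxies/   -> JSON with "countries" and "proxies" keys
#   curl -X POST http://localhost:7860/   -> 405 Method Not Allowed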