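# Proxy subscription converter: a small aiohttp service that downloads a
# Clash-style YAML proxy list from a user-supplied URL and rewrites its
# ss/trojan entries into Surge-style "name = type, server, port, key=value"
# lines. Converted results are cached per URL for 30 minutes.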
import asyncio
import datetime
import re
import traceback
from urllib.parse import parse_qs

import aiohttp
import yaml
from aiohttp import web, ClientTimeout, TCPConnector
from cachetools import TTLCache

# Converted results, keyed by subscription URL, kept for 30 minutes.
cache = TTLCache(maxsize=1000, ttl=1800)

# Desktop Chrome User-Agent, presumably to get past hosts that reject
# obvious non-browser clients.
CHROME_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"

async def fetch_url(url, session, max_retries=3):
    """Download `url`, retrying on network errors and timeouts."""
    headers = {"User-Agent": CHROME_USER_AGENT}
    for attempt in range(max_retries):
        try:
            async with session.get(url, headers=headers, timeout=ClientTimeout(total=40)) as response:
                response.raise_for_status()
                content = await response.read()
                return content.decode('utf-8', errors='ignore')
        except (aiohttp.ClientError, asyncio.TimeoutError) as e:
            # The 40 s total timeout raises asyncio.TimeoutError, which is not
            # an aiohttp.ClientError, so it must be caught explicitly here.
            print(f"Attempt {attempt + 1} failed: {str(e)}", flush=True)
            if attempt == max_retries - 1:
                raise
            await asyncio.sleep(1)
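
# Pull the proxy list out of Clash-style YAML and rewrite each supported
# entry. For example, an input entry such as (names here are illustrative)
#   - {name: node1, type: trojan, server: 1.2.3.4, port: 443, password: pw, sni: a.com}
# becomes the Surge-style line
#   node1 = trojan, 1.2.3.4, 443, password=pw, sni=a.com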

async def extract_and_transform_proxies(input_text):
    proxies_list = None
    try:
        data = yaml.safe_load(input_text)
        if isinstance(data, dict) and 'proxies' in data:
            proxies_list = data['proxies']
        elif isinstance(data, list):
            proxies_list = data
    except yaml.YAMLError:
        pass

    if proxies_list is None:
        # Fall back to extracting just the `proxies:` block; this also covers
        # inputs that fail to parse as a whole YAML document.
        proxies_match = re.search(r'proxies:\s*\n((?:[-\s]*{.*\n?)*)', input_text, re.MULTILINE)
        if not proxies_match:
            return "No valid proxy configuration found"
        try:
            proxies_list = yaml.safe_load(proxies_match.group(1))
        except yaml.YAMLError:
            return "YAML parsing error"

    if not proxies_list:
        return "No valid proxy configuration found"

    transformed_proxies = []

    for proxy in proxies_list:
        # Only ss and trojan entries are supported; everything else is skipped.
        if isinstance(proxy, dict) and proxy.get('type') in ['ss', 'trojan']:
            name = proxy.get('name', '').strip()
            server = proxy.get('server', '').strip()
            port = str(proxy.get('port', '')).strip()

            # Surge-style line: "name = type, server, port, key=value, ..."
            parts = [f"{name} = {proxy['type']}, {server}, {port}"]

            if proxy['type'] == 'ss':
                if 'cipher' in proxy:
                    parts.append(f"encrypt-method={proxy['cipher'].strip()}")
                if 'password' in proxy:
                    # Surge ss lines need the password as well.
                    parts.append(f"password={str(proxy['password']).strip()}")
            elif proxy['type'] == 'trojan':
                if 'password' in proxy:
                    # str() guards against YAML parsing a numeric password.
                    parts.append(f"password={str(proxy['password']).strip()}")
                if 'sni' in proxy:
                    parts.append(f"sni={proxy['sni'].strip()}")
                if 'skip-cert-verify' in proxy:
                    parts.append(f"skip-cert-verify={str(proxy['skip-cert-verify']).lower()}")

            if 'udp' in proxy:
                parts.append(f"udp-relay={'true' if proxy['udp'] in [True, 'true', 'True'] else 'false'}")

            transformed_proxies.append(", ".join(parts))

    return "\n".join(transformed_proxies)
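
# HTTP entry point: GET /?url=<subscription> fetches, converts, and caches the
# result; GET / without a url parameter serves a short HTML usage page.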

async def handle_request(request):
    if request.path == '/':
        query_params = parse_qs(request.query_string)
        if 'url' in query_params:
            url = query_params['url'][0]
            cache_hit = False
            
            if url in cache:
                result = cache[url]
                cache_hit = True
            else:
                try:
                    async with aiohttp.ClientSession(connector=TCPConnector(ssl=False)) as session:
                        input_text = await fetch_url(url, session)
                    result = await extract_and_transform_proxies(input_text)
                    cache[url] = result
                except Exception as e:
                    print(f"Error processing request: {str(e)}", flush=True)
                    traceback.print_exc()
                    return web.Response(text=f"Error: {str(e)}", status=500)
            
            error_results = ("No valid proxy configuration found", "YAML parsing error")
            proxy_count = result.count('\n') + 1 if result and result not in error_results else 0
            return web.Response(text=result, content_type='text/plain', headers={'X-Proxy-Count': str(proxy_count), 'X-Cache-Hit': str(cache_hit)})
        else:
            usage_guide = """
            <html>
            <body>
            <h1>Proxy Configuration Converter</h1>
            <p>Usage: pass the address of a page containing a proxy configuration in the url parameter.</p>
            <p>Example: <code>http://localhost:8080/?url=https://example.com/path-to-proxy-config</code></p>
            </body>
            </html>
            """
            return web.Response(text=usage_guide, content_type='text/html')
    else:
        return web.Response(text="Not Found", status=404)
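
# Access-log middleware: prints one line per request with timestamp, client IP,
# target URL, response status, number of converted proxies, and cache hit/miss.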

@web.middleware
async def logging_middleware(request, handler):
    start_time = datetime.datetime.now()
    try:
        response = await handler(request)
        end_time = datetime.datetime.now()
        
        timestamp = end_time.strftime('%Y-%m-%d %H:%M:%S')
        client_ip = request.remote
        target_url = request.query.get('url', '-')
        status_code = response.status
        proxy_count = response.headers.get('X-Proxy-Count', '0')
        cache_hit = "Hit" if response.headers.get('X-Cache-Hit') == 'True' else "Miss"
        
        log_message = f"{timestamp} - {client_ip} - \"{request.method} /?url={target_url}\" - Status: {status_code} - Proxies: {proxy_count} - Cache: {cache_hit}"
        print(log_message, flush=True)
        
        return response
    except Exception as e:
        end_time = datetime.datetime.now()
        print(f"Error occurred: {str(e)}", flush=True)
        print(f"Request processing time: {end_time - start_time}", flush=True)
        print("Traceback:", flush=True)
        traceback.print_exc()
        return web.Response(text=f"Internal Server Error: {str(e)}", status=500)

async def init_app():
    app = web.Application(middlewares=[logging_middleware])
    app.router.add_get('/', handle_request)
    return app

if __name__ == "__main__":
    print(f"===== Application Startup at {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')} =====")
    print("Server running on port 8080")
    # print=lambda suppresses aiohttp's own startup banner, since ours is printed above.
    web.run_app(init_app(), port=8080, print=lambda _: None)
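
# Quick manual check, assuming the server is running locally (the target URL
# below is a placeholder, not a real subscription):
#
#   curl -i "http://localhost:8080/?url=https://example.com/clash.yaml"
#
# The body holds one Surge-style line per converted proxy; the X-Proxy-Count
# and X-Cache-Hit headers report the conversion count and cache status.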