sfun committed on
Commit
49bdf69
·
verified ·
1 Parent(s): 57fcd5b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +51 -18
app.py CHANGED
@@ -7,9 +7,26 @@ import sys
7
  import traceback
8
  from aiohttp import web, ClientTimeout, TCPConnector
9
  from urllib.parse import parse_qs
10
- from cachetools import TTLCache
11
 
12
- cache = TTLCache(maxsize=1000, ttl=1800) # 30 minutes cache
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
 
14
  CHROME_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
15
 
@@ -78,20 +95,17 @@ async def extract_and_transform_proxies(input_text):
78
  return "\n".join(transformed_proxies)
79
 
80
  def get_client_ip(request):
81
- """获取客户端真实IP地址"""
82
  headers_to_check = [
83
  'X-Forwarded-For',
84
  'X-Real-IP',
85
- 'CF-Connecting-IP', # Cloudflare
86
- 'True-Client-IP', # Akamai and Cloudflare
87
  'X-Client-IP',
88
  ]
89
  for header in headers_to_check:
90
  ip = request.headers.get(header)
91
  if ip:
92
- # X-Forwarded-For可能包含多个IP,我们取第一个
93
  return ip.split(',')[0].strip()
94
- # 如果没有找到,返回远程地址
95
  return request.remote
96
 
97
  async def handle_request(request):
@@ -99,24 +113,41 @@ async def handle_request(request):
99
  query_params = parse_qs(request.query_string)
100
  if 'url' in query_params:
101
  url = query_params['url'][0]
 
102
  cache_hit = False
103
-
104
- if url in cache:
105
- result = cache[url]
 
106
  cache_hit = True
107
- else:
 
 
108
  try:
109
  async with aiohttp.ClientSession(connector=TCPConnector(ssl=False)) as session:
110
  input_text = await fetch_url(url, session)
111
- result = await extract_and_transform_proxies(input_text)
112
- cache[url] = result
 
 
 
 
 
 
 
113
  except Exception as e:
114
- print(f"Error processing request: {str(e)}", flush=True)
115
- traceback.print_exc()
116
- return web.Response(text=f"Error: {str(e)}", status=500)
 
117
 
118
  proxy_count = result.count('\n') + 1 if result and result != "未找到有效的代理配置" else 0
119
- return web.Response(text=result, content_type='text/plain', headers={'X-Proxy-Count': str(proxy_count), 'X-Cache-Hit': str(cache_hit)})
 
 
 
 
 
120
  else:
121
  usage_guide = """
122
  <html>
@@ -144,8 +175,10 @@ async def logging_middleware(request, handler):
144
  status_code = response.status
145
  proxy_count = response.headers.get('X-Proxy-Count', '0')
146
  cache_hit = "Hit" if response.headers.get('X-Cache-Hit') == 'True' else "Miss"
 
 
147
 
148
- log_message = f"{timestamp} - {client_ip} - \"{request.method} /?url={target_url}\" - {status_code} Proxies: {proxy_count} - Cache: {cache_hit}"
149
  print(log_message, flush=True)
150
 
151
  return response
 
7
  import traceback
8
  from aiohttp import web, ClientTimeout, TCPConnector
9
  from urllib.parse import parse_qs
10
+ from collections import namedtuple
11
 
12
# A cache entry pairs the cached payload with the moment it was stored.
CacheEntry = namedtuple('CacheEntry', ['data', 'timestamp'])

class CustomCache:
    """Minimal TTL cache mapping keys to CacheEntry(data, timestamp) tuples.

    Unlike the previous version, expired entries are evicted on access so the
    backing dict does not grow without bound when many distinct keys are seen.
    """

    def __init__(self, ttl=1800):
        # ttl: number of seconds an entry stays valid after being set.
        self.cache = {}
        self.ttl = ttl

    def get(self, key):
        """Return the live CacheEntry for *key*, or None if absent or expired.

        An expired entry is deleted before returning None so its memory is
        reclaimed (the original implementation kept stale entries forever).
        """
        entry = self.cache.get(key)
        if entry is None:
            return None
        age = (datetime.datetime.now() - entry.timestamp).total_seconds()
        if age < self.ttl:
            return entry
        del self.cache[key]  # stale — drop it
        return None

    def set(self, key, value):
        """Store *value* under *key*, timestamped with the current time."""
        self.cache[key] = CacheEntry(value, datetime.datetime.now())

cache = CustomCache(ttl=1800)  # 30 minutes cache
30
 
31
  CHROME_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
32
 
 
95
  return "\n".join(transformed_proxies)
96
 
97
def get_client_ip(request):
    """Best-effort client IP: first populated proxy header, else the peer address.

    Headers are consulted in priority order. X-Forwarded-For may carry a
    comma-separated chain of addresses, in which case the first (origin)
    entry is returned.
    """
    candidate_headers = (
        'X-Forwarded-For',
        'X-Real-IP',
        'CF-Connecting-IP',
        'True-Client-IP',
        'X-Client-IP',
    )
    found = next(
        (request.headers.get(name) for name in candidate_headers
         if request.headers.get(name)),
        None,
    )
    if found is None:
        return request.remote
    return found.split(',')[0].strip()
110
 
111
  async def handle_request(request):
 
113
  query_params = parse_qs(request.query_string)
114
  if 'url' in query_params:
115
  url = query_params['url'][0]
116
+ cache_entry = cache.get(url)
117
  cache_hit = False
118
+ new_data = False
119
+
120
+ if cache_entry:
121
+ result = cache_entry.data
122
  cache_hit = True
123
+ cache_time = cache_entry.timestamp
124
+
125
+ if not cache_hit or (datetime.datetime.now() - cache_entry.timestamp).total_seconds() >= 1800:
126
  try:
127
  async with aiohttp.ClientSession(connector=TCPConnector(ssl=False)) as session:
128
  input_text = await fetch_url(url, session)
129
+ new_result = await extract_and_transform_proxies(input_text)
130
+ if new_result != "未找到有效的代理配置" and new_result != "YAML解析错误":
131
+ result = new_result
132
+ cache.set(url, result)
133
+ new_data = True
134
+ cache_time = datetime.datetime.now()
135
+ elif not cache_hit:
136
+ result = new_result
137
+ cache_time = datetime.datetime.now()
138
  except Exception as e:
139
+ if not cache_hit:
140
+ print(f"Error processing request: {str(e)}", flush=True)
141
+ traceback.print_exc()
142
+ return web.Response(text=f"Error: {str(e)}", status=500)
143
 
144
  proxy_count = result.count('\n') + 1 if result and result != "未找到有效的代理配置" else 0
145
+ return web.Response(text=result, content_type='text/plain', headers={
146
+ 'X-Proxy-Count': str(proxy_count),
147
+ 'X-Cache-Hit': str(cache_hit),
148
+ 'X-Cache-Time': cache_time.strftime('%Y-%m-%d %H:%M:%S'),
149
+ 'X-New-Data': str(new_data)
150
+ })
151
  else:
152
  usage_guide = """
153
  <html>
 
175
  status_code = response.status
176
  proxy_count = response.headers.get('X-Proxy-Count', '0')
177
  cache_hit = "Hit" if response.headers.get('X-Cache-Hit') == 'True' else "Miss"
178
+ cache_time = response.headers.get('X-Cache-Time', '-')
179
+ new_data = "Yes" if response.headers.get('X-New-Data') == 'True' else "No"
180
 
181
+ log_message = f"{timestamp} - {client_ip} - \"GET /?url={target_url}\" - Status: {status_code} - Proxies: {proxy_count} - Cache: {cache_hit} - CacheTime: {cache_time} - NewData: {new_data}"
182
  print(log_message, flush=True)
183
 
184
  return response