Spaces:
Paused
Paused
Upload 9 files
Browse files- dsk/CloudflareBypasser.py +96 -0
- dsk/__pycache__/api.cpython-310.pyc +0 -0
- dsk/__pycache__/pow.cpython-310.pyc +0 -0
- dsk/api.py +286 -0
- dsk/bypass.py +87 -0
- dsk/pow.py +111 -0
- dsk/run_and_get_cookies.py +61 -0
- dsk/server.py +176 -0
- dsk/wasm/sha3_wasm_bg.7b9ca65ddd.wasm +3 -0
dsk/CloudflareBypasser.py
ADDED
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import time
|
2 |
+
from DrissionPage import ChromiumPage
|
3 |
+
|
4 |
+
class CloudflareBypasser:
    """Clicks through Cloudflare's "Just a moment" interstitial.

    Drives a live DrissionPage ChromiumPage: hunts down the Turnstile
    checkbox (hidden behind nested shadow roots / an iframe) and clicks
    it repeatedly until the challenge page goes away.
    """

    def __init__(self, driver: ChromiumPage, max_retries=-1, log=True):
        # A negative max_retries means "keep trying forever".
        self.driver = driver
        self.max_retries = max_retries
        self.log = log

    def search_recursively_shadow_root_with_iframe(self, ele):
        """Depth-first hunt for the first shadow root hosting an <iframe>."""
        if not ele.shadow_root:
            # No shadow root here: descend into the regular children instead.
            for child in ele.children():
                found = self.search_recursively_shadow_root_with_iframe(child)
                if found:
                    return found
            return None
        inner = ele.shadow_root.child()
        return inner if inner.tag == "iframe" else None

    def search_recursively_shadow_root_with_cf_input(self, ele):
        """Depth-first hunt for the first shadow root containing an <input>."""
        if not ele.shadow_root:
            for child in ele.children():
                found = self.search_recursively_shadow_root_with_cf_input(child)
                if found:
                    return found
            return None
        hit = ele.shadow_root.ele("tag:input")
        return hit if hit else None

    def locate_cf_button(self):
        """Find the Turnstile verification checkbox, or None.

        Fast path: the hidden input whose name contains "turnstile" sits
        next to the widget, so its parent's shadow DOM leads straight to
        the checkbox.  Slow path: recursive shadow-root search from <body>.
        """
        for candidate in self.driver.eles("tag:input"):
            attrs = candidate.attrs
            if "name" in attrs.keys() and "type" in attrs.keys():
                if "turnstile" in attrs["name"] and attrs["type"] == "hidden":
                    return candidate.parent().shadow_root.child()("tag:body").shadow_root("tag:input")

        # If the button is not found, search it recursively
        self.log_message("Basic search failed. Searching for button recursively.")
        body = self.driver.ele("tag:body")
        frame = self.search_recursively_shadow_root_with_iframe(body)
        if not frame:
            self.log_message("Iframe not found. Button search failed.")
            return None
        return self.search_recursively_shadow_root_with_cf_input(frame("tag:body"))

    def log_message(self, message):
        """Print `message` unless logging was disabled at construction."""
        if self.log:
            print(message)

    def click_verification_button(self):
        """Locate the checkbox and click it; errors are logged, not raised."""
        try:
            button = self.locate_cf_button()
            if button:
                self.log_message("Verification button found. Attempting to click.")
                button.click()
            else:
                self.log_message("Verification button not found.")
        except Exception as e:
            self.log_message(f"Error clicking verification button: {e}")

    def is_bypassed(self):
        """True once the page title no longer looks like the challenge page."""
        try:
            return "just a moment" not in self.driver.title.lower()
        except Exception as e:
            self.log_message(f"Error checking page title: {e}")
            return False

    def bypass(self):
        """Click the challenge until it clears or the retry budget runs out."""
        try_count = 0

        while not self.is_bypassed():
            # max_retries + 1 > 0 only when a finite retry budget was set.
            if 0 < self.max_retries + 1 <= try_count:
                self.log_message("Exceeded maximum retries. Bypass failed.")
                break

            self.log_message(f"Attempt {try_count + 1}: Verification page detected. Trying to bypass...")
            self.click_verification_button()

            try_count += 1
            time.sleep(2)

        if self.is_bypassed():
            self.log_message("Bypass successful.")
        else:
            self.log_message("Bypass failed.")
|
dsk/__pycache__/api.cpython-310.pyc
ADDED
Binary file (9.38 kB). View file
|
|
dsk/__pycache__/pow.cpython-310.pyc
ADDED
Binary file (3.57 kB). View file
|
|
dsk/api.py
ADDED
@@ -0,0 +1,286 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from curl_cffi import requests
|
2 |
+
from typing import Optional, Dict, Any, Generator, Literal
|
3 |
+
import json
|
4 |
+
from .pow import DeepSeekPOW
|
5 |
+
import pkg_resources
|
6 |
+
import sys
|
7 |
+
from pathlib import Path
|
8 |
+
import subprocess
|
9 |
+
import time
|
10 |
+
|
11 |
+
ThinkingMode = Literal['detailed', 'simple', 'disabled']
|
12 |
+
SearchMode = Literal['enabled', 'disabled']
|
13 |
+
|
14 |
+
class DeepSeekError(Exception):
    """Root of the DeepSeek client exception hierarchy."""


class AuthenticationError(DeepSeekError):
    """Signals an invalid or expired authentication token."""


class RateLimitError(DeepSeekError):
    """Signals that the API rate limit has been exceeded."""


class NetworkError(DeepSeekError):
    """Signals a failure in network communication."""


class CloudflareError(DeepSeekError):
    """Signals that Cloudflare blocked the request."""


class APIError(DeepSeekError):
    """Signals a non-success response from the API.

    Attributes:
        status_code: HTTP status of the failed request, when known.
    """

    def __init__(self, message: str, status_code: Optional[int] = None):
        super().__init__(message)
        self.status_code = status_code
39 |
+
|
40 |
+
class DeepSeekAPI:
    """Client for the private chat.deepseek.com web API (api/v0).

    Wraps the three moving parts the endpoints require: a bearer auth
    token, Cloudflare clearance cookies (cookies.json, produced by
    bypass.py), and a per-request proof-of-work response solved by
    DeepSeekPOW.
    """

    BASE_URL = "https://chat.deepseek.com/api/v0"

    def __init__(self, auth_token: str):
        """Store credentials, warn on mismatched curl-cffi, load cookies.

        Raises:
            AuthenticationError: if `auth_token` is empty or not a string.
        """
        if not auth_token or not isinstance(auth_token, str):
            raise AuthenticationError("Invalid auth token provided")

        # The endpoints are TLS-fingerprint sensitive; only this curl-cffi
        # build is known to impersonate Chrome acceptably.
        try:
            curl_cffi_version = pkg_resources.get_distribution('curl-cffi').version
            if curl_cffi_version != '0.8.1b9':
                print("\033[93mWarning: DeepSeek API requires curl-cffi version 0.8.1b9", file=sys.stderr)
                print("Please install the correct version using: pip install curl-cffi==0.8.1b9\033[0m", file=sys.stderr)
        except pkg_resources.DistributionNotFound:
            print("\033[93mWarning: curl-cffi not found. Please install version 0.8.1b9:", file=sys.stderr)
            print("pip install curl-cffi==0.8.1b9\033[0m", file=sys.stderr)

        self.auth_token = auth_token
        self.pow_solver = DeepSeekPOW()
        self.cookies = self._load_cookies()

    def _load_cookies(self) -> Dict[str, str]:
        """Read Cloudflare cookies from cookies.json; {} when unavailable."""
        cookies_path = Path(__file__).parent / 'cookies.json'
        try:
            with open(cookies_path, 'r') as f:
                cookie_data = json.load(f)
            return cookie_data.get('cookies', {})
        except (FileNotFoundError, json.JSONDecodeError) as e:
            print(f"\033[93mWarning: Could not load cookies from {cookies_path}: {e}\033[0m", file=sys.stderr)
            return {}

    def _get_headers(self, pow_response: Optional[str] = None) -> Dict[str, str]:
        """Build the browser-mimicking header set; attach PoW when given."""
        headers = {
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'authorization': f'Bearer {self.auth_token}',
            'content-type': 'application/json',
            'origin': 'https://chat.deepseek.com',
            'referer': 'https://chat.deepseek.com/',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36',
            'x-app-version': '20241129.1',
            'x-client-locale': 'en_US',
            'x-client-platform': 'web',
            'x-client-version': '1.0.0-always',
        }

        if pow_response:
            headers['x-ds-pow-response'] = pow_response

        return headers

    def _refresh_cookies(self) -> None:
        """Run the cookie refresh script (bypass.py) and reload cookies."""
        try:
            script_path = Path(__file__).parent / 'bypass.py'

            # str() for portability: older Pythons / Windows subprocess
            # handle plain strings more reliably than PathLike objects.
            subprocess.run([sys.executable, str(script_path)], check=True)

            # Wait briefly for the cookies file to be fully written.
            time.sleep(2)

            self.cookies = self._load_cookies()

        except Exception as e:
            print(f"\033[93mWarning: Failed to refresh cookies: {e}\033[0m", file=sys.stderr)

    def _make_request(self, method: str, endpoint: str, json_data: Dict[str, Any], pow_required: bool = False) -> Any:
        """Issue one API request, retrying once through a cookie refresh
        if Cloudflare serves its challenge page instead of JSON.

        Raises:
            AuthenticationError / RateLimitError / APIError / NetworkError
            depending on the failure mode.
        """
        url = f"{self.BASE_URL}{endpoint}"

        retry_count = 0
        max_retries = 2

        while retry_count < max_retries:
            try:
                headers = self._get_headers()
                if pow_required:
                    challenge = self._get_pow_challenge()
                    pow_response = self.pow_solver.solve_challenge(challenge)
                    headers = self._get_headers(pow_response)

                response = requests.request(
                    method=method,
                    url=url,
                    headers=headers,
                    json=json_data,
                    cookies=self.cookies,
                    impersonate='chrome120',
                    timeout=None
                )

                # An HTML "Just a moment" body means our clearance cookie
                # expired — refresh it once and retry.
                if "<!DOCTYPE html>" in response.text and "Just a moment" in response.text:
                    print("\033[93mWarning: Cloudflare protection detected. Bypassing...\033[0m", file=sys.stderr)
                    if retry_count < max_retries - 1:
                        self._refresh_cookies()
                    retry_count += 1
                    continue

                # Map HTTP status codes onto the client exception hierarchy.
                if response.status_code == 401:
                    raise AuthenticationError("Invalid or expired authentication token")
                elif response.status_code == 429:
                    raise RateLimitError("API rate limit exceeded")
                elif response.status_code >= 500:
                    raise APIError(f"Server error occurred: {response.text}", response.status_code)
                elif response.status_code != 200:
                    raise APIError(f"API request failed: {response.text}", response.status_code)

                return response.json()

            except requests.exceptions.RequestException as e:
                raise NetworkError(f"Network error occurred: {str(e)}")
            except json.JSONDecodeError:
                raise APIError("Invalid JSON response from server")

        raise APIError("Failed to bypass Cloudflare protection after multiple attempts")

    def _get_pow_challenge(self) -> Dict[str, Any]:
        """Fetch a fresh proof-of-work challenge for the completion path."""
        try:
            response = self._make_request(
                'POST',
                '/chat/create_pow_challenge',
                {'target_path': '/api/v0/chat/completion'}
            )
            return response['data']['biz_data']['challenge']
        except KeyError:
            raise APIError("Invalid challenge response format from server")

    def create_chat_session(self) -> str:
        """Creates a new chat session and returns the session ID"""
        try:
            response = self._make_request(
                'POST',
                '/chat_session/create',
                {'character_id': None}
            )
            return response['data']['biz_data']['id']
        except KeyError:
            raise APIError("Invalid session creation response format from server")

    def chat_completion(self,
                       chat_session_id: str,
                       prompt: str,
                       parent_message_id: Optional[str] = None,
                       thinking_enabled: bool = True,
                       search_enabled: bool = False) -> Generator[Dict[str, Any], None, None]:
        """
        Send a message and get streaming response

        Args:
            chat_session_id (str): The ID of the chat session
            prompt (str): The message to send
            parent_message_id (Optional[str]): ID of the parent message for threading
            thinking_enabled (bool): Whether to show the thinking process
            search_enabled (bool): Whether to enable web search for up-to-date information

        Returns:
            Generator[Dict[str, Any], None, None]: Yields message chunks with content and type

        Raises:
            ValueError: If the prompt or session ID is empty (raised eagerly)
            AuthenticationError: If the authentication token is invalid
            RateLimitError: If the API rate limit is exceeded
            NetworkError: If a network error occurs
            APIError: If any other API error occurs
        """
        # Bug fix: validation used to live inside the generator body, so a
        # bad argument only raised on the first next() call.  Validate here,
        # eagerly, then hand back the streaming generator.
        if not prompt or not isinstance(prompt, str):
            raise ValueError("Prompt must be a non-empty string")
        if not chat_session_id or not isinstance(chat_session_id, str):
            raise ValueError("Chat session ID must be a non-empty string")

        return self._stream_completion(chat_session_id, prompt, parent_message_id,
                                       thinking_enabled, search_enabled)

    def _stream_completion(self, chat_session_id, prompt, parent_message_id,
                           thinking_enabled, search_enabled):
        """Generator doing the actual PoW-signed streaming POST."""
        json_data = {
            'chat_session_id': chat_session_id,
            'parent_message_id': parent_message_id,
            'prompt': prompt,
            'ref_file_ids': [],
            'thinking_enabled': thinking_enabled,
            'search_enabled': search_enabled,
        }

        try:
            headers = self._get_headers(
                pow_response=self.pow_solver.solve_challenge(
                    self._get_pow_challenge()
                )
            )

            response = requests.post(
                f"{self.BASE_URL}/chat/completion",
                headers=headers,
                json=json_data,
                cookies=self.cookies,  # Cloudflare clearance cookies
                impersonate='chrome120',
                stream=True,
                timeout=None
            )

            if response.status_code != 200:
                error_text = next(response.iter_lines(), b'').decode('utf-8', 'ignore')
                if response.status_code == 401:
                    raise AuthenticationError("Invalid or expired authentication token")
                elif response.status_code == 429:
                    raise RateLimitError("API rate limit exceeded")
                else:
                    raise APIError(f"API request failed: {error_text}", response.status_code)

            for chunk in response.iter_lines():
                try:
                    parsed = self._parse_chunk(chunk)
                    if parsed:
                        yield parsed
                        if parsed.get('finish_reason') == 'stop':
                            break
                except Exception as e:
                    raise APIError(f"Error parsing response chunk: {str(e)}")

        except requests.exceptions.RequestException as e:
            raise NetworkError(f"Network error occurred during streaming: {str(e)}")

    def _parse_chunk(self, chunk: bytes) -> Optional[Dict[str, Any]]:
        """Parse one SSE line; returns a delta dict, or None for non-data lines."""
        if not chunk:
            return None

        try:
            if chunk.startswith(b'data: '):
                data = json.loads(chunk[6:])  # strip the "data: " prefix

                if 'choices' in data and data['choices']:
                    choice = data['choices'][0]
                    if 'delta' in choice:
                        delta = choice['delta']

                        return {
                            'content': delta.get('content', ''),
                            'type': delta.get('type', ''),
                            'finish_reason': choice.get('finish_reason')
                        }
        except json.JSONDecodeError:
            raise APIError("Invalid JSON in response chunk")
        except Exception as e:
            raise APIError(f"Error parsing chunk: {str(e)}")

        return None
|
dsk/bypass.py
ADDED
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# run_and_get_cookies.py
|
2 |
+
import subprocess
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
import time
|
6 |
+
import requests
|
7 |
+
import json
|
8 |
+
|
9 |
+
def validate_cookies(cookies_data):
    """Return True when a non-blank cf_clearance cookie is present."""
    jar = cookies_data.get('cookies', {})
    if 'cf_clearance' not in jar:
        return False
    # Whitespace-only clearance values are as useless as missing ones.
    return jar['cf_clearance'].strip() != ''
|
13 |
+
|
14 |
+
def get_and_save_cookies(server_url, cookie_file_path, max_retries=3):
    """Poll the local bypass server for Cloudflare cookies and save them.

    Keeps retrying (sleeping 5s between attempts) until a response carries
    a non-blank cf_clearance cookie or `max_retries` is exhausted.

    Args:
        server_url: URL of the local /cookies endpoint to poll.
        cookie_file_path: where the cookies JSON file is written.
        max_retries: total attempts before giving up.

    Returns:
        True when valid cookies were saved, False otherwise.
    """
    for attempt in range(max_retries):
        try:
            # Bug fix: the original GET had no timeout, so a server that
            # accepted the connection but never answered hung forever.
            # The bypass drives a real browser, so allow a generous window.
            response = requests.get(server_url, timeout=300)
            response.raise_for_status()
            cookies_data = response.json()

            if not validate_cookies(cookies_data):
                print(f"Attempt {attempt + 1}: cf_clearance cookie not found, retrying...")
                time.sleep(5)
                continue

            cookies_to_save = {
                'cookies': cookies_data.get('cookies', {}),
                'user_agent': cookies_data.get('user_agent', '')
            }

            os.makedirs(os.path.dirname(cookie_file_path), exist_ok=True)
            with open(cookie_file_path, 'w', encoding='utf-8') as f:
                json.dump(cookies_to_save, f, indent=4, ensure_ascii=False)
            print("Successfully obtained and saved cookies with cf_clearance!")
            return True

        except (requests.exceptions.ConnectionError,
                requests.exceptions.Timeout) as e:
            print(f"Connection error on attempt {attempt + 1}: {str(e)}")
            if attempt < max_retries - 1:
                time.sleep(5)
            else:
                print("Max retries reached. Failed to get valid cookies.")
                return False

    print("Failed to obtain valid cf_clearance cookie after all attempts")
    return False
|
47 |
+
|
48 |
+
def run_server_background():
    """Launch server.py as a detached background process.

    Returns the Popen handle, or None if the process could not be started.
    """
    script_dir = os.path.dirname(os.path.abspath(__file__))
    server_script = os.path.abspath(os.path.join(script_dir, "server.py"))
    server_dir = os.path.dirname(server_script)

    os.makedirs(server_dir, exist_ok=True)

    try:
        # Detach (start_new_session) and silence output so the child runs
        # independently without spamming our console.
        return subprocess.Popen(
            [sys.executable, server_script],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            cwd=server_dir,
            start_new_session=True,
        )
    except Exception:
        return None
|
66 |
+
|
67 |
+
if __name__ == "__main__":
    print("Getting the cookies...")
    server_process = run_server_background()

    if server_process is None:
        print("Failed to start server.")
        sys.exit(1)

    # Give the FastAPI server ample time to finish booting before polling.
    time.sleep(10)
    server_url = "http://localhost:8000/cookies?url=https://chat.deepseek.com"
    cookie_file = "dsk/cookies.json"

    # Extra retries for reliability against slow Cloudflare challenges.
    success = get_and_save_cookies(server_url, cookie_file, max_retries=5)

    if not success:
        print("Failed to obtain valid cookies.")
        server_process.terminate()
        sys.exit(1)
    server_process.terminate()
|
dsk/pow.py
ADDED
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
DeepSeek Proof of Work Challenge Implementation
|
3 |
+
Author: @xtekky
|
4 |
+
Date: 2024
|
5 |
+
|
6 |
+
This module implements a proof-of-work challenge solver using WebAssembly (WASM)
|
7 |
+
for Custom sha3 hashing. It provides functionality to solve computational challenges
|
8 |
+
required for authentication or rate limiting purposes.
|
9 |
+
"""
|
10 |
+
|
11 |
+
import json
|
12 |
+
import base64
|
13 |
+
import wasmtime
|
14 |
+
import numpy as np
|
15 |
+
from typing import Dict, Any
|
16 |
+
import os
|
17 |
+
|
18 |
+
# Bundled wasm-bindgen build of DeepSeek's sha3-based PoW solver.
# os.path.join replaces the original f-string concatenation so the path is
# also correct on Windows (no mixed separators).
WASM_PATH = os.path.join(os.path.dirname(__file__), 'wasm', 'sha3_wasm_bg.7b9ca65ddd.wasm')
|
19 |
+
|
20 |
+
class DeepSeekHash:
    """Thin wrapper around DeepSeek's sha3 WASM proof-of-work module.

    Exposes just enough of the wasm-bindgen ABI (shadow stack pointer,
    exported allocator, linear memory) to call `wasm_solve` from Python.
    """

    def __init__(self):
        self.instance = None  # wasmtime.Instance, set by init()
        self.memory = None    # the module's exported linear memory
        self.store = None     # wasmtime.Store owning instance + memory

    def init(self, wasm_path: str):
        """Compile and instantiate the WASM module; returns self for chaining."""
        engine = wasmtime.Engine()

        with open(wasm_path, 'rb') as f:
            wasm_bytes = f.read()

        module = wasmtime.Module(engine, wasm_bytes)

        self.store = wasmtime.Store(engine)
        linker = wasmtime.Linker(engine)
        linker.define_wasi()

        self.instance = linker.instantiate(self.store, module)
        self.memory = self.instance.exports(self.store)["memory"]

        return self

    def _write_to_memory(self, text: str) -> tuple[int, int]:
        """Copy `text` (UTF-8) into WASM linear memory.

        Uses the module's own allocator so the pointer stays valid for the
        solver call.  Returns (pointer, byte length).
        """
        encoded = text.encode('utf-8')
        length = len(encoded)
        # __wbindgen_export_0 is the wasm-bindgen malloc: (size, align).
        ptr = self.instance.exports(self.store)["__wbindgen_export_0"](self.store, length, 1)

        memory_view = self.memory.data_ptr(self.store)
        for i, byte in enumerate(encoded):
            memory_view[ptr + i] = byte

        return ptr, length

    def calculate_hash(self, algorithm: str, challenge: str, salt: str,
                       difficulty: int, expire_at: int) -> "int | None":
        """Run the PoW solver; returns the integer answer, or None on failure.

        Fix: the original annotation claimed `-> float`, but the method
        actually returns an int nonce, or None when the solver reports
        status 0.  `algorithm` is accepted for interface compatibility but
        is not passed to the WASM call — presumably the module hard-codes
        its hash (NOTE(review): confirm).
        """
        prefix = f"{salt}_{expire_at}_"
        # Reserve 16 bytes of WASM shadow stack for the (status, value) pair.
        retptr = self.instance.exports(self.store)["__wbindgen_add_to_stack_pointer"](self.store, -16)

        try:
            challenge_ptr, challenge_len = self._write_to_memory(challenge)
            prefix_ptr, prefix_len = self._write_to_memory(prefix)

            self.instance.exports(self.store)["wasm_solve"](
                self.store,
                retptr,
                challenge_ptr,
                challenge_len,
                prefix_ptr,
                prefix_len,
                float(difficulty)
            )

            memory_view = self.memory.data_ptr(self.store)
            # Little-endian i32 status at retptr; 0 means no answer found.
            status = int.from_bytes(bytes(memory_view[retptr:retptr + 4]), byteorder='little', signed=True)

            if status == 0:
                return None

            # f64 result lives in the 8-byte-aligned slot at retptr + 8.
            value_bytes = bytes(memory_view[retptr + 8:retptr + 16])
            value = np.frombuffer(value_bytes, dtype=np.float64)[0]

            return int(value)

        finally:
            # Always restore the shadow stack pointer, even on failure.
            self.instance.exports(self.store)["__wbindgen_add_to_stack_pointer"](self.store, 16)
|
87 |
+
|
88 |
+
class DeepSeekPOW:
    """Solves DeepSeek proof-of-work challenges via the bundled WASM hasher."""

    def __init__(self):
        # One shared hasher instance; WASM instantiation is done once here.
        self.hasher = DeepSeekHash().init(WASM_PATH)

    def solve_challenge(self, config: Dict[str, Any]) -> str:
        """Solves a proof-of-work challenge and returns the encoded response"""
        answer = self.hasher.calculate_hash(
            config['algorithm'],
            config['challenge'],
            config['salt'],
            config['difficulty'],
            config['expire_at'],
        )

        # Echo the challenge fields back alongside the computed answer, then
        # base64-encode the JSON payload for the x-ds-pow-response header.
        payload = {
            'algorithm': config['algorithm'],
            'challenge': config['challenge'],
            'salt': config['salt'],
            'answer': answer,
            'signature': config['signature'],
            'target_path': config['target_path'],
        }
        return base64.b64encode(json.dumps(payload).encode()).decode()
|
dsk/run_and_get_cookies.py
ADDED
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# run_and_get_cookies.py
|
2 |
+
import subprocess
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
import time
|
6 |
+
import requests
|
7 |
+
import json
|
8 |
+
|
9 |
+
def get_and_save_cookies(server_url, cookie_file_path):
    """Fetch cookies from the local bypass server and persist them as JSON.

    Retries up to 5 times on connection errors, sleeping 5s between tries;
    re-raises the last error when every attempt fails.
    """
    for attempt in range(5):
        try:
            # Bug fix: the original GET had no timeout, so a server that
            # accepted the connection but never answered hung forever.
            # The bypass drives a real browser, so allow a generous window.
            response = requests.get(server_url, timeout=300)
            response.raise_for_status()
            cookies_data = response.json()

            cookies_to_save = {
                'cookies': cookies_data.get('cookies', {}),
                'user_agent': cookies_data.get('user_agent', '')
            }

            os.makedirs(os.path.dirname(cookie_file_path), exist_ok=True)
            with open(cookie_file_path, 'w', encoding='utf-8') as f:
                json.dump(cookies_to_save, f, indent=4, ensure_ascii=False)
            return

        except (requests.exceptions.ConnectionError,
                requests.exceptions.Timeout) as e:
            if attempt < 4:
                time.sleep(5)
            else:
                raise
|
31 |
+
|
32 |
+
def run_server_background():
    """Start server.py detached in the background.

    Returns the Popen handle, or None when the launch fails.
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    script = os.path.abspath(os.path.join(base_dir, "server.py"))
    workdir = os.path.dirname(script)

    os.makedirs(workdir, exist_ok=True)

    try:
        # New session + discarded output: the child keeps running quietly
        # regardless of what happens to this script's terminal.
        child = subprocess.Popen(
            [sys.executable, script],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            cwd=workdir,
            start_new_session=True,
        )
    except Exception:
        return None
    return child
|
50 |
+
|
51 |
+
if __name__ == "__main__":
    print("Getting the cookies...")
    server_process = run_server_background()

    if server_process is None:
        print("Failed to start server.")
    else:
        time.sleep(5)  # let the server finish booting before polling it
        get_and_save_cookies(
            "http://localhost:8000/cookies?url=https://chat.deepseek.com",
            "dsk/cookies.json",
        )
|
dsk/server.py
ADDED
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import re
|
3 |
+
import os
|
4 |
+
from urllib.parse import urlparse
|
5 |
+
|
6 |
+
from CloudflareBypasser import CloudflareBypasser
|
7 |
+
from DrissionPage import ChromiumPage, ChromiumOptions
|
8 |
+
from fastapi import FastAPI, HTTPException, Response
|
9 |
+
from pydantic import BaseModel
|
10 |
+
from typing import Dict
|
11 |
+
import argparse
|
12 |
+
|
13 |
+
from pyvirtualdisplay import Display
|
14 |
+
import uvicorn
|
15 |
+
import atexit
|
16 |
+
import time
|
17 |
+
|
18 |
+
# ---- Runtime configuration --------------------------------------------------

# Check if running in Docker mode (DOCKERMODE=true enables container tweaks).
DOCKER_MODE = os.getenv("DOCKERMODE", "false").lower() == "true"

# Port the FastAPI app listens on; override with the SERVER_PORT env var.
SERVER_PORT = int(os.getenv("SERVER_PORT", 8000))

# Chromium options arguments passed to the browser on launch.
# NOTE(review): these flags carry a single leading dash; Chromium itself
# normally expects "--flag" — DrissionPage appears to tolerate this form,
# but confirm before changing.
arguments = [
    # "--remote-debugging-port=9222", # Add this line for remote debugging
    "-no-first-run",
    "-force-color-profile=srgb",
    "-metrics-recording-only",
    "-password-store=basic",
    "-use-mock-keychain",
    "-export-tagged-pdf",
    "-no-default-browser-check",
    "-disable-background-mode",
    "-enable-features=NetworkService,NetworkServiceInProcess,LoadCryptoTokenExtension,PermuteTLSExtensions",
    "-disable-features=FlashDeprecationWarning,EnablePasswordsAccountStorage",
    "-deny-permission-prompts",
    "-disable-gpu",
    "-accept-lang=en-US",
    #"-incognito" # You can add this line to open the browser in incognito mode by default
]

# Fixed Chrome binary location (Linux install path).
browser_path = "/usr/bin/google-chrome"
app = FastAPI()
|
44 |
+
|
45 |
+
|
46 |
+
# Pydantic model for the /cookies response payload.
class CookieResponse(BaseModel):
    """Cookie jar plus the browser user agent that produced it."""
    cookies: Dict[str, str]
    user_agent: str
|
50 |
+
|
51 |
+
|
52 |
+
# Function to check if the URL is safe (SSRF guard for user-supplied URLs)
def is_safe_url(url: str) -> bool:
    """Return False for URLs targeting loopback/private/link-local hosts,
    "localhost", or the file:// scheme; True otherwise.

    Fix: the original regex blocklist only matched a handful of literal
    patterns (e.g. it allowed 127.0.0.2); stdlib `ipaddress` classifies
    every private/loopback/link-local/reserved literal correctly for both
    IPv4 and IPv6.  Limitation (unchanged from the original): hostnames
    that merely *resolve* to private addresses are not caught.
    """
    import ipaddress  # local import keeps this function self-contained

    parsed_url = urlparse(url)
    if parsed_url.scheme == "file":
        return False
    hostname = parsed_url.hostname
    if not hostname:
        # Preserve legacy behavior: URLs without a hostname are allowed.
        return True
    if hostname.lower() == "localhost":
        return False
    try:
        ip = ipaddress.ip_address(hostname)
    except ValueError:
        return True  # a domain name, not an IP literal
    # Covers 127.0.0.0/8, 10/8, 172.16/12, 192.168/16, ::1, 0.0.0.0, etc. —
    # strictly broader than the old regex blocklist.
    return not (ip.is_private or ip.is_loopback or ip.is_link_local
                or ip.is_reserved or ip.is_unspecified)
|
62 |
+
|
63 |
+
|
64 |
+
# Function to verify if the page has loaded properly
def verify_page_loaded(driver: ChromiumPage) -> bool:
    """Return True when the page body exists and carries real content.

    A failed navigation typically yields an (almost) empty body, so more
    than 100 characters of body HTML is used as the heuristic.
    """
    try:
        # Wait up to 10s for a <body> element to appear.
        body = driver.ele('tag:body', timeout=10)
        return len(body.html) > 100
    except Exception:
        # Fix: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit are no longer swallowed.
        return False
|
74 |
+
|
75 |
+
|
76 |
+
# Function to bypass Cloudflare protection
def bypass_cloudflare(url: str, retries: int, log: bool, proxy: str = None) -> ChromiumPage:
    """Open `url` in a fresh Chromium, verify the load, and run the bypass.

    Retries the whole load/bypass cycle up to 3 times.  On success returns
    the live driver — the caller owns it and must call quit().

    Args:
        url: page to open.
        retries: max bypass click attempts passed to CloudflareBypasser.
        log: forward progress messages to stdout.
        proxy: optional proxy URL for the browser.
    """
    max_load_retries = 3

    for load_attempt in range(max_load_retries):
        options = ChromiumOptions().auto_port()
        if DOCKER_MODE:
            options.set_argument("--auto-open-devtools-for-tabs", "true")
            options.set_argument("--remote-debugging-port=9222")
            options.set_argument("--no-sandbox")  # Necessary for Docker
            options.set_argument("--disable-gpu")  # Optional, helps in some cases
            options.set_paths(browser_path=browser_path).headless(False)
        else:
            options.set_paths(browser_path=browser_path).headless(False)

        if proxy:
            options.set_proxy(proxy)

        driver = ChromiumPage(addr_or_opts=options)
        try:
            driver.get(url)
            # Wait for initial page load
            time.sleep(5)

            if not verify_page_loaded(driver):
                driver.quit()
                if load_attempt < max_load_retries - 1:
                    time.sleep(3)
                    continue
                raise Exception("Failed to load page properly after multiple attempts")

            cf_bypasser = CloudflareBypasser(driver, retries, log)
            cf_bypasser.bypass()
            return driver
        except Exception:
            driver.quit()
            if load_attempt < max_load_retries - 1:
                time.sleep(3)
                continue
            # Fix: the original `raise e` re-bound the exception and
            # truncated its traceback; a bare `raise` preserves it.
            raise
|
117 |
+
|
118 |
+
|
119 |
+
# Endpoint to get cookies
@app.get("/cookies", response_model=CookieResponse)
async def get_cookies(url: str, retries: int = 5, proxy: str = None):
    """Bypass Cloudflare for `url` and return its cookie jar + user agent."""
    if not is_safe_url(url):
        raise HTTPException(status_code=400, detail="Invalid URL")
    # Bug fix: `log` was a bare global only assigned in the __main__ block,
    # so this endpoint raised NameError whenever the app was imported by an
    # external ASGI server (e.g. `uvicorn server:app`).
    log_enabled = globals().get("log", True)
    try:
        driver = bypass_cloudflare(url, retries, log_enabled, proxy)
        try:
            cookies = {cookie.get("name", ""): cookie.get("value", " ") for cookie in driver.cookies()}
            user_agent = driver.user_agent
        finally:
            # Bug fix: the browser process leaked if cookie extraction raised.
            driver.quit()
        return CookieResponse(cookies=cookies, user_agent=user_agent)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
132 |
+
|
133 |
+
|
134 |
+
# Endpoint to get HTML content and cookies
@app.get("/html")
async def get_html(url: str, retries: int = 5, proxy: str = None):
    """Bypass Cloudflare for `url`; return its HTML with cookies/UA headers."""
    if not is_safe_url(url):
        raise HTTPException(status_code=400, detail="Invalid URL")
    # Bug fix: `log` was a bare global only assigned in the __main__ block,
    # so this endpoint raised NameError whenever the app was imported by an
    # external ASGI server (e.g. `uvicorn server:app`).
    log_enabled = globals().get("log", True)
    try:
        driver = bypass_cloudflare(url, retries, log_enabled, proxy)
        try:
            html = driver.html
            cookies_json = {cookie.get("name", ""): cookie.get("value", " ") for cookie in driver.cookies()}
            user_agent = driver.user_agent
        finally:
            # Bug fix: the browser process leaked if extraction raised.
            driver.quit()
        response = Response(content=html, media_type="text/html")
        response.headers["cookies"] = json.dumps(cookies_json)
        response.headers["user_agent"] = user_agent
        return response
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
150 |
+
|
151 |
+
|
152 |
+
# Main entry point
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Cloudflare bypass api")
    parser.add_argument("--nolog", action="store_true", help="Disable logging")
    parser.add_argument("--headless", action="store_true", help="Run in headless mode")
    args = parser.parse_args()

    display = None
    if args.headless or DOCKER_MODE:
        # Chrome still needs an X server even in "headless" operation here,
        # so run it inside a virtual display.
        display = Display(visible=0, size=(1920, 1080))
        display.start()

        def cleanup_display():
            if display:
                display.stop()

        atexit.register(cleanup_display)

    # Global flag read by the endpoints to enable/disable progress output.
    log = not args.nolog

    uvicorn.run(app, host="0.0.0.0", port=SERVER_PORT)
|
dsk/wasm/sha3_wasm_bg.7b9ca65ddd.wasm
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:b3fca8cc072c1defbd60c02266a8e48bd307a1804aaff4314900aea720e72f7d
|
3 |
+
size 26612
|