Spaces:
Paused
Paused
Add auto proxy
Browse files
- config.json +7 -7
- server/auto_proxy.py +106 -0
- server/backend.py +34 -12
config.json
CHANGED
|
@@ -1,8 +1,8 @@
|
|
| 1 |
{
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
}
|
|
|
|
| 1 |
{
|
| 2 |
+
"site_config": {
|
| 3 |
+
"host": "0.0.0.0",
|
| 4 |
+
"port": 1338,
|
| 5 |
+
"debug": false
|
| 6 |
+
},
|
| 7 |
+
"use_auto_proxy": false
|
| 8 |
+
}
|
server/auto_proxy.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import random
|
| 2 |
+
import requests
|
| 3 |
+
import time
|
| 4 |
+
import threading
|
| 5 |
+
from freeGPT import gpt3
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def fetch_proxies(timeout=10):
    """Fetch a list of proxy servers from proxyscrape.com.

    Args:
        timeout (int, optional): Maximum seconds to wait for the API
            response. Defaults to 10.

    Returns:
        list: A list of proxy servers in the format "IP:Port"; an empty
        list on any network or HTTP error.
    """
    url = "https://api.proxyscrape.com/v2/?request=displayproxies&protocol=http&timeout=1000&country=all&ssl=all&anonymity=all"
    try:
        # Without an explicit timeout, requests.get can block indefinitely
        # and stall the background proxy-refresh thread.
        response = requests.get(url, timeout=timeout)
    except requests.RequestException as e:
        print(f"Error fetching proxies: {e}")
        return []
    if response.status_code == 200:
        # The API returns one "IP:Port" per "\r\n"-terminated line; the
        # element after the final separator is empty, so drop it.
        return response.text.split("\r\n")[:-1]
    print(f"Error fetching proxies: {response.status_code}")
    return []
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def test_proxy(proxy, prompt, timeout):
    """Test the given proxy server with a specified prompt and timeout.

    Args:
        proxy (str): The proxy server in the format "IP:Port".
        prompt (str): The test prompt to be used for testing.
        timeout (int): The maximum time in seconds allowed for the test.

    Note:
        The timeout is only checked after the request completes — it does
        not abort a slow request, it merely rejects proxies that answered
        too slowly. Failures are swallowed by design: this is a best-effort
        probe and a broken proxy is simply not added.
    """
    try:
        start_time = time.time()
        gpt3.Completion.create(prompt=prompt, proxy=proxy)
        response_time = time.time() - start_time

        if response_time < timeout:
            response_time = int(response_time * 1000)
            print(f'proxy: {proxy} [{response_time}ms] ✅')
            add_working_proxy(proxy)
    except Exception:
        # Unreachable/broken proxies raise here; ignore them.
        pass
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def add_working_proxy(proxy):
    """Add a working proxy server to the global working_proxies list.

    Args:
        proxy (str): The proxy server in the format "IP:Port".
    """
    global working_proxies
    # The module never initializes working_proxies at import time, so guard
    # against a NameError if a probe thread reports a result before
    # update_working_proxies has reset the list.
    if "working_proxies" not in globals():
        working_proxies = []
    working_proxies.append(proxy)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def remove_proxy(proxy):
    """Remove a proxy server from the global working_proxies list.

    Removing a proxy that is not present is a no-op.

    Args:
        proxy (str): The proxy server in the format "IP:Port".
    """
    global working_proxies
    # The module defines working_proxies lazily; guard against it not
    # existing yet so a failed first request cannot raise NameError.
    if "working_proxies" not in globals():
        working_proxies = []
    if proxy in working_proxies:
        working_proxies.remove(proxy)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def get_working_proxies(prompt, timeout=5):
    """Fetch and test proxy servers, adding working proxies to the global
    working_proxies list.

    Args:
        prompt (str): The test prompt to be used for testing.
        timeout (int, optional): The maximum time in seconds allowed for
            testing. Defaults to 5.
    """
    proxy_list = fetch_proxies()
    threads = []

    for proxy in proxy_list:
        # daemon=True so straggler probes cannot keep the process alive
        # after the main program exits.
        thread = threading.Thread(target=test_proxy,
                                  args=(proxy, prompt, timeout),
                                  daemon=True)
        threads.append(thread)
        thread.start()

    # Join against one shared deadline. Joining each thread with the full
    # timeout would make the worst-case wait len(threads) * timeout.
    deadline = time.time() + timeout
    for t in threads:
        t.join(max(0.0, deadline - time.time()))
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def update_working_proxies():
    """Continuously refresh the global working_proxies list.

    Runs forever: each pass re-tests proxies against a fixed prompt and
    then sleeps for 30 minutes before the next refresh.
    """
    global working_proxies
    test_prompt = "What is the capital of France?"
    refresh_seconds = 1800  # 30 minutes between refresh passes

    while True:
        # Start each pass from scratch so dead proxies age out of the pool.
        working_proxies = []
        get_working_proxies(test_prompt)
        print('proxies updated')
        time.sleep(refresh_seconds)
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def get_random_proxy():
    """Pick one entry at random from the global working_proxies list.

    Returns:
        str: A random working proxy server in the format "IP:Port".

    Raises:
        IndexError: If the pool of working proxies is currently empty.
    """
    global working_proxies
    pool = working_proxies
    return random.choice(pool)
|
server/backend.py
CHANGED
|
@@ -1,19 +1,16 @@
|
|
| 1 |
-
|
| 2 |
from flask import request
|
| 3 |
-
from hashlib import sha256
|
| 4 |
from datetime import datetime
|
| 5 |
from requests import get
|
| 6 |
-
from requests import post
|
| 7 |
-
from json import loads
|
| 8 |
from freeGPT import gpt3
|
| 9 |
-
|
| 10 |
from server.config import special_instructions
|
| 11 |
|
| 12 |
|
| 13 |
class Backend_Api:
|
| 14 |
def __init__(self, app, config: dict) -> None:
|
| 15 |
self.app = app
|
| 16 |
-
self.
|
| 17 |
self.routes = {
|
| 18 |
'/backend-api/v2/conversation': {
|
| 19 |
'function': self._conversation,
|
|
@@ -21,6 +18,11 @@ class Backend_Api:
|
|
| 21 |
}
|
| 22 |
}
|
| 23 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
def _conversation(self):
|
| 25 |
try:
|
| 26 |
jailbreak = request.json['jailbreak']
|
|
@@ -53,12 +55,32 @@ class Backend_Api:
|
|
| 53 |
extra + special_instructions[jailbreak] + \
|
| 54 |
_conversation + [prompt]
|
| 55 |
|
| 56 |
-
def stream():
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 62 |
|
| 63 |
except Exception as e:
|
| 64 |
print(e)
|
|
|
|
| 1 |
+
import threading
|
| 2 |
from flask import request
|
|
|
|
| 3 |
from datetime import datetime
|
| 4 |
from requests import get
|
|
|
|
|
|
|
| 5 |
from freeGPT import gpt3
|
| 6 |
+
from server.auto_proxy import get_random_proxy, remove_proxy, update_working_proxies
|
| 7 |
from server.config import special_instructions
|
| 8 |
|
| 9 |
|
| 10 |
class Backend_Api:
|
| 11 |
def __init__(self, app, config: dict) -> None:
|
| 12 |
self.app = app
|
| 13 |
+
self.use_auto_proxy = config['use_auto_proxy']
|
| 14 |
self.routes = {
|
| 15 |
'/backend-api/v2/conversation': {
|
| 16 |
'function': self._conversation,
|
|
|
|
| 18 |
}
|
| 19 |
}
|
| 20 |
|
| 21 |
+
if self.use_auto_proxy:
|
| 22 |
+
update_proxies = threading.Thread(
|
| 23 |
+
target=update_working_proxies, daemon=True)
|
| 24 |
+
update_proxies.start()
|
| 25 |
+
|
| 26 |
def _conversation(self):
|
| 27 |
try:
|
| 28 |
jailbreak = request.json['jailbreak']
|
|
|
|
| 55 |
extra + special_instructions[jailbreak] + \
|
| 56 |
_conversation + [prompt]
|
| 57 |
|
| 58 |
+
def stream():
|
| 59 |
+
response = None
|
| 60 |
+
|
| 61 |
+
while self.use_auto_proxy:
|
| 62 |
+
try:
|
| 63 |
+
random_proxy = get_random_proxy()
|
| 64 |
+
res = gpt3.Completion.create(
|
| 65 |
+
prompt=conversation, proxy=random_proxy)
|
| 66 |
+
response = res['text']
|
| 67 |
+
break
|
| 68 |
+
except Exception as e:
|
| 69 |
+
print(f"Error with proxy {random_proxy}: {e}")
|
| 70 |
+
remove_proxy(random_proxy)
|
| 71 |
+
|
| 72 |
+
if not self.use_auto_proxy:
|
| 73 |
+
try:
|
| 74 |
+
res = gpt3.Completion.create(prompt=conversation)
|
| 75 |
+
response = res['text']
|
| 76 |
+
except Exception as e:
|
| 77 |
+
print(f"Error: {e}")
|
| 78 |
+
|
| 79 |
+
if response is not None:
|
| 80 |
+
print(response)
|
| 81 |
+
yield response
|
| 82 |
+
|
| 83 |
+
return self.app.response_class(stream(), mimetype='text/event-stream')
|
| 84 |
|
| 85 |
except Exception as e:
|
| 86 |
print(e)
|