Update main.py
main.py
CHANGED
@@ -8,103 +8,60 @@ from typing import Any, Dict, List, Optional
 import httpx
 import uvicorn
 from dotenv import load_dotenv
-from fastapi import FastAPI, HTTPException, Depends
 from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from pydantic import BaseModel
 from starlette.middleware.cors import CORSMiddleware
 from starlette.responses import StreamingResponse, Response

 logging.basicConfig(
     level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
 )
 logger = logging.getLogger(__name__)

 load_dotenv()
 app = FastAPI()
-BASE_URL = "https://aichatonlineorg.erweima.ai/aichatonline"
-APP_SECRET = os.getenv("APP_SECRET","786")
-ACCESS_TOKEN = os.getenv("SD_ACCESS_TOKEN","")
-headers = {
-    'accept': '*/*',
-    'accept-language': 'en-US,en;q=0.9',
-    'authorization': f'Bearer {ACCESS_TOKEN}',
-    'cache-control': 'no-cache',
-    'origin': 'chrome-extension://difoiogjjojoaoomphldepapgpbgkhkb',
-    'pragma': 'no-cache',
-    'priority': 'u=1, i',
-    'cookie': 'lang=en; source=gg; p1=pricing; p2=search; _clck=ih7kjx%7C2%7Cfqq%7C0%7C1774; _gcl_gs=2.1.k1$i1731168118$u7016880; _ga=GA1.1.1378917294.1731168124; _gcl_au=1.1.436410932.1731168125; _fbp=fb.1.1731168125483.909164969447382788; _gcl_aw=GCL.1731168126.EAIaIQobChMI_aOK5c_PiQMVAqSDBx2OJh5bEAAYASAAEgJr9PD_BwE; __stripe_mid=b2de6d00-4ac3-4c15-9ba8-355dd63bbdf0cbeb53; _uetsid=f73638b09eb311efa0cf6b0f13512e16; _uetvid=f73635709eb311efbd8d55293947782a; token=Bearer%20eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoxNTYzMzI1NSwicmVnaXN0ZXJfdHlwZSI6Im9hdXRoMiIsImFwcF9uYW1lIjoiQ2hpdENoYXRfV2ViIiwidG9rZW5faWQiOiI2MGRmZGMwMi1lY2QzLTQzNzktYjMzNy1kYmNlNGY5M2M1N2IiLCJpc3MiOiJzaWRlci5haSIsImF1ZCI6WyIiXSwiZXhwIjoxNzYyMjcyNTk5LCJuYmYiOjE3MzExNjg1OTksImlhdCI6MTczMTE2ODU5OX0.cT9hXiCFm-4Or2RV5Lf3IY-blu40UOvjqD01BGoDwh8; refresh_token=discard; userinfo-avatar=https://chitchat-avatar.s3.amazonaws.com/eb0e281f19c442d681cf552f3b8896dd-1731167278.png; userinfo-name=Chat%20GPT%20free; userinfo-type=oauth2; CloudFront-Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiaHR0cHM6Ly9maWxlLWNkbi5zaWRlci5haS8qL1UwMUFIRUc4NkVBLyoiLCJDb25kaXRpb24iOnsiRGF0ZUxlc3NUaGFuIjp7IkFXUzpFcG9jaFRpbWUiOjE3MzM3NjA2MDF9fX1dfQ__; CloudFront-Signature=fuXgBZW3E5TCNvvi8mb8DKpNTneaPIGunH~zxWPoVK~OaG-n6PRVz2qlxOFCY3HLTZyA72mQ4T2OtKCZGoWCTL4QL6DP5LnRw7HPt50K~D8Wte5M3GQjuSeuBe~mK44Sk~Xqn1tOa2dmOtri84vlLdo7ud4ZGuMML60AUve13l7eS8uAJO88cQ9rhdZMJ26opJhayI0MSJgCFZymzMm1iEwwSF3ufV5c2elpTuKqVTP2HqZGHzgJQvRl7U~gU4qFiH7KxCG0MF5X0HDsz3Pn0qghNle~FLAtCumI-mzoGNCXEM9~qpYAXJLO3a--ThhnbTrWsPLl2l~zCRiKQ5wB3g__; CloudFront-Key-Pair-Id=K344F5VVSSM536; _rdt_uuid=1731168120149.9c981ceb-cb94-490d-99d8-0f1530348c3c; _clsk=r6ig1v%7C1731170609000%7C8%7C1%7Cu.clarity.ms%2Fcollect; _ga_0PRFKME4HP=GS1.1.1731171204.2.1.1731171205.59.0.0',
-    'sec-fetch-dest': 'empty',
-    'sec-fetch-mode': 'cors',
-    'sec-fetch-site': 'none',
-    'sec-fetch-site': 'none',
-    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
-}

 ALLOWED_MODELS = [
-    {"id": "claude-3.5-sonnet", "name": "
-    {"id": "claude-3-opus", "name": "
-    {"id": "gemini-1.5-pro", "name": "
-    {"id": "gpt-4o", "name": "
-    {"id": "o1-preview", "name": "
-    {"id": "o1-mini", "name": "
-    {"id": "gpt-4o-mini", "name": "
 ]
-
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=["
     allow_credentials=True,
-    allow_methods=["
-    allow_headers=["
 )
-security = HTTPBearer()


 class Message(BaseModel):
     role: str
     content: str

-
 class ChatRequest(BaseModel):
     model: str
     messages: List[Message]
     stream: Optional[bool] = False

-
-def simulate_data(content, model):
-    return {
-        "id": f"chatcmpl-{uuid.uuid4()}",
-        "object": "chat.completion.chunk",
-        "created": int(datetime.now().timestamp()),
-        "model": model,
-        "choices": [
-            {
-                "index": 0,
-                "delta": {"content": content, "role": "assistant"},
-                "finish_reason": None,
-            }
-        ],
-        "usage": None,
-    }
-
-
-def stop_data(content, model):
-    return {
-        "id": f"chatcmpl-{uuid.uuid4()}",
-        "object": "chat.completion.chunk",
-        "created": int(datetime.now().timestamp()),
-        "model": model,
-        "choices": [
-            {
-                "index": 0,
-                "delta": {"content": content, "role": "assistant"},
-                "finish_reason": "stop",
-            }
-        ],
-        "usage": None,
-    }
-
-
 def create_chat_completion_data(content: str, model: str, finish_reason: Optional[str] = None) -> Dict[str, Any]:
     return {
         "id": f"chatcmpl-{uuid.uuid4()}",
@@ -121,13 +78,12 @@ def create_chat_completion_data(content: str, model: str, finish_reason: Optiona
         "usage": None,
     }

-
 def verify_app_secret(credentials: HTTPAuthorizationCredentials = Depends(security)):
     if credentials.credentials != APP_SECRET:
         raise HTTPException(status_code=403, detail="Invalid APP_SECRET")
     return credentials.credentials

-
 @app.options("/hf/v1/chat/completions")
 async def chat_completions_options():
     return Response(
@@ -139,16 +95,10 @@ async def chat_completions_options():
         },
     )

-
-def replace_escaped_newlines(input_string: str) -> str:
-    return input_string.replace("\\n", "\n")
-
-
 @app.get("/hf/v1/models")
 async def list_models():
     return {"object": "list", "data": ALLOWED_MODELS}

-
 @app.post("/hf/v1/chat/completions")
 async def chat_completions(
     request: ChatRequest, app_secret: str = Depends(verify_app_secret)
@@ -156,65 +106,91 @@ async def chat_completions(
     logger.info(f"Received chat completion request for model: {request.model}")

     if request.model not in [model['id'] for model in ALLOWED_MODELS]:
         raise HTTPException(
             status_code=400,
-            detail=f"Model {request.model} is not allowed. Allowed models are: {
         )
     # Generate a UUID
-
-

-    #
     json_data = {
         'prompt': "\n".join(
             [
                 f"{'User' if msg.role == 'user' else 'Assistant'}: {msg.content}"
                 for msg in request.messages
             ]
-
         'stream': True,
         'app_name': 'ChitChat_Edge_Ext',
         'app_version': '4.28.0',
         'tz_name': 'Asia/Karachi',
         'cid': 'C092SEMXM9BJ',
         'model': request.model,
-        'search': False,
-        'auto_search': False,
         'from': 'chat',
         'group_id': 'default',
         'prompt_template': {
-
-
-
         },
-        },
         'tools': {
-            'auto': ['text_to_image', 'data_analysis'],
-
         'extra_info': {
             'origin_url': 'chrome-extension://difoiogjjojoaoomphldepapgpbgkhkb/standalone.html?from=sidebar',
             'origin_title': 'Sider',
-
-        }
-
-

     async def generate():
         async with httpx.AsyncClient() as client:
             try:
-                async with client.stream(
                     response.raise_for_status()
                     async for line in response.aiter_lines():
                         if line and ("[DONE]" not in line):
-
-
                     yield f"data: {json.dumps(create_chat_completion_data('', request.model, 'stop'))}\n\n"
                     yield "data: [DONE]\n\n"
             except httpx.HTTPStatusError as e:
                 logger.error(f"HTTP error occurred: {e}")
                 raise HTTPException(status_code=e.response.status_code, detail=str(e))
             except httpx.RequestError as e:
-                logger.error(f"
                 raise HTTPException(status_code=500, detail=str(e))

     if request.stream:
@@ -225,11 +201,10 @@ async def chat_completions(
         full_response = ""
         async for chunk in generate():
             if chunk.startswith("data: ") and not chunk[6:].startswith("[DONE]"):
-                # print(chunk)
                 data = json.loads(chunk[6:])
                 if data["choices"][0]["delta"].get("content"):
                     full_response += data["choices"][0]["delta"]["content"]
-
         return {
             "id": f"chatcmpl-{uuid.uuid4()}",
             "object": "chat.completion",
@@ -245,7 +220,6 @@ async def chat_completions(
             "usage": None,
         }

-
-
-
-uvicorn.run(app, host="0.0.0.0", port=7860)
@@ -8,103 +8,60 @@ from typing import Any, Dict, List, Optional
 import httpx
 import uvicorn
 from dotenv import load_dotenv
+from fastapi import FastAPI, HTTPException, Depends, Request
 from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from pydantic import BaseModel
 from starlette.middleware.cors import CORSMiddleware
 from starlette.responses import StreamingResponse, Response

+# Configure Logging
 logging.basicConfig(
     level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
 )
 logger = logging.getLogger(__name__)

+# Load Environment Variables
 load_dotenv()
+APP_SECRET = os.getenv("APP_SECRET", "786")
+ACCESS_TOKEN = os.getenv("SD_ACCESS_TOKEN", "")
+
+# Initialize FastAPI
 app = FastAPI()
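Aside (not part of the diff): the new version reads both secrets from the environment after load_dotenv(). Below is a minimal sketch of supplying and sanity-checking them at startup, assuming a .env file sits next to main.py; the warning behaviour is an illustration, not something the app above actually does.

import logging
import os

from dotenv import load_dotenv

load_dotenv()  # picks up a local .env, e.g. lines APP_SECRET=... and SD_ACCESS_TOKEN=...

APP_SECRET = os.getenv("APP_SECRET", "786")
ACCESS_TOKEN = os.getenv("SD_ACCESS_TOKEN", "")

log = logging.getLogger(__name__)
if not ACCESS_TOKEN:
    # Upstream calls would go out with an empty Bearer token and likely be rejected.
    log.warning("SD_ACCESS_TOKEN is not set")
if APP_SECRET == "786":
    log.warning("APP_SECRET is still the default value; clients authenticate with it")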
 
+# Define Allowed Models
 ALLOWED_MODELS = [
+    {"id": "claude-3.5-sonnet", "name": "Claude 3.5 Sonnet"},
+    {"id": "claude-3-opus", "name": "Claude 3 Opus"},
+    {"id": "gemini-1.5-pro", "name": "Gemini 1.5 Pro"},
+    {"id": "gpt-4o", "name": "GPT-4o"},
+    {"id": "o1-preview", "name": "O1 Preview"},
+    {"id": "o1-mini", "name": "O1 Mini"},
+    {"id": "gpt-4o-mini", "name": "GPT-4o Mini"},
 ]
+
+# Configure CORS Middleware
 app.add_middleware(
     CORSMiddleware,
+    allow_origins=["https://yourdomain.com"],  # Replace with your trusted domains
     allow_credentials=True,
+    allow_methods=["POST", "OPTIONS"],
+    allow_headers=["Content-Type", "Authorization"],
 )
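Aside (not part of the diff): the /hf/v1/models route defined further down returns ALLOWED_MODELS as-is. A small client sketch, assuming the app is reachable at a hypothetical local address:

import httpx

BASE_URL = "http://localhost:7860"  # hypothetical address of the deployed app

resp = httpx.get(f"{BASE_URL}/hf/v1/models")
resp.raise_for_status()
print(resp.json())
# -> {"object": "list", "data": [{"id": "claude-3.5-sonnet", "name": "Claude 3.5 Sonnet"}, ...]}

Note that the tightened CORS settings only constrain browsers on other origins; a server-side client like the one above is unaffected.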
 
+# Security Scheme
+security = HTTPBearer()

+# Pydantic Models
 class Message(BaseModel):
     role: str
     content: str

 class ChatRequest(BaseModel):
     model: str
     messages: List[Message]
     stream: Optional[bool] = False

+# Helper Functions
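Aside (not part of the diff): the request body is validated by the two Pydantic models above. A minimal sketch of the JSON shape they accept (model_dump() assumes Pydantic v2; on v1 the equivalent is .dict()):

from typing import List, Optional

from pydantic import BaseModel


class Message(BaseModel):
    role: str
    content: str


class ChatRequest(BaseModel):
    model: str
    messages: List[Message]
    stream: Optional[bool] = False


body = ChatRequest(
    model="gpt-4o",
    messages=[Message(role="user", content="Hello!")],
    stream=True,
)
print(body.model_dump())
# {'model': 'gpt-4o', 'messages': [{'role': 'user', 'content': 'Hello!'}], 'stream': True}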
 def create_chat_completion_data(content: str, model: str, finish_reason: Optional[str] = None) -> Dict[str, Any]:
     return {
         "id": f"chatcmpl-{uuid.uuid4()}",
@@ -121,13 +78,12 @@ def create_chat_completion_data(content: str, model: str, finish_reason: Optiona
         "usage": None,
     }

 def verify_app_secret(credentials: HTTPAuthorizationCredentials = Depends(security)):
     if credentials.credentials != APP_SECRET:
         raise HTTPException(status_code=403, detail="Invalid APP_SECRET")
     return credentials.credentials

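Aside (not part of the diff): the middle of create_chat_completion_data is collapsed in this hunk, but the removed simulate_data/stop_data helpers show the same field layout, so each streamed event carries an OpenAI-style chat.completion.chunk object shaped like this (values are illustrative):

example_chunk = {
    "id": "chatcmpl-<uuid4>",
    "object": "chat.completion.chunk",
    "created": 1731171205,  # int(datetime.now().timestamp())
    "model": "gpt-4o",
    "choices": [
        {
            "index": 0,
            "delta": {"content": "Hello", "role": "assistant"},
            "finish_reason": None,  # "stop" on the final chunk
        }
    ],
    "usage": None,
}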
+# Routes
 @app.options("/hf/v1/chat/completions")
 async def chat_completions_options():
     return Response(
@@ -139,16 +95,10 @@ async def chat_completions_options():
         },
     )

 @app.get("/hf/v1/models")
 async def list_models():
     return {"object": "list", "data": ALLOWED_MODELS}

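Aside (not part of the diff): verify_app_secret compares the Bearer token against APP_SECRET, so a non-streaming call looks like the sketch below. The base URL and secret value are assumptions for illustration; the response is the OpenAI-style "chat.completion" object assembled at the end of the route.

import httpx

BASE_URL = "http://localhost:7860"  # hypothetical address of the deployed app
APP_SECRET = "786"                  # must match the server's APP_SECRET

resp = httpx.post(
    f"{BASE_URL}/hf/v1/chat/completions",
    headers={"Authorization": f"Bearer {APP_SECRET}"},  # checked by verify_app_secret
    json={
        "model": "gpt-4o",
        "messages": [{"role": "user", "content": "Say hi"}],
        "stream": False,
    },
    timeout=120.0,
)
resp.raise_for_status()
print(resp.json())  # OpenAI-style "chat.completion" object built from full_response

The GET /hf/v1/models route above has no auth dependency, so it needs no Authorization header.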
 @app.post("/hf/v1/chat/completions")
 async def chat_completions(
     request: ChatRequest, app_secret: str = Depends(verify_app_secret)
@@ -156,65 +106,91 @@ async def chat_completions(
     logger.info(f"Received chat completion request for model: {request.model}")

     if request.model not in [model['id'] for model in ALLOWED_MODELS]:
+        allowed = ', '.join(model['id'] for model in ALLOWED_MODELS)
         raise HTTPException(
             status_code=400,
+            detail=f"Model '{request.model}' is not allowed. Allowed models are: {allowed}",
         )
+
     # Generate a UUID
+    uuid_str = str(uuid.uuid4()).replace("-", "")
+
+    # Prepare Headers (Move sensitive data to environment variables or secure storage)
+    headers = {
+        'accept': '*/*',
+        'accept-language': 'en-US,en;q=0.9',
+        'authorization': f'Bearer {ACCESS_TOKEN}',
+        'cache-control': 'no-cache',
+        'origin': 'chrome-extension://difoiogjjojoaoomphldepapgpbgkhkb',
+        'pragma': 'no-cache',
+        'priority': 'u=1, i',
+        'sec-fetch-dest': 'empty',
+        'sec-fetch-mode': 'cors',
+        'sec-fetch-site': 'none',
+        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
+    }

+    # Prepare JSON Payload
     json_data = {
         'prompt': "\n".join(
             [
                 f"{'User' if msg.role == 'user' else 'Assistant'}: {msg.content}"
                 for msg in request.messages
             ]
+        ),
         'stream': True,
         'app_name': 'ChitChat_Edge_Ext',
         'app_version': '4.28.0',
         'tz_name': 'Asia/Karachi',
         'cid': 'C092SEMXM9BJ',
         'model': request.model,
+        'search': False,
+        'auto_search': False,
         'from': 'chat',
         'group_id': 'default',
         'prompt_template': {
+            'key': 'artifacts',
+            'attributes': {
+                'lang': 'original',
+            },
         },
         'tools': {
+            'auto': ['text_to_image', 'data_analysis'],
+        },
         'extra_info': {
             'origin_url': 'chrome-extension://difoiogjjojoaoomphldepapgpbgkhkb/standalone.html?from=sidebar',
             'origin_title': 'Sider',
+        },
+    }
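Aside (not part of the diff): the 'prompt' field flattens the message list into a plain transcript, exactly as the join above does. For example:

messages = [
    {"role": "user", "content": "What is FastAPI?"},
    {"role": "assistant", "content": "A Python web framework."},
    {"role": "user", "content": "Show me an example."},
]
prompt = "\n".join(
    f"{'User' if m['role'] == 'user' else 'Assistant'}: {m['content']}"
    for m in messages
)
print(prompt)
# User: What is FastAPI?
# Assistant: A Python web framework.
# User: Show me an example.

Note that any role other than "user" (including "system") is rendered as "Assistant:".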
 
     async def generate():
         async with httpx.AsyncClient() as client:
             try:
+                async with client.stream(
+                    'POST',
+                    'https://sider.ai/api/v2/completion/text',
+                    headers=headers,
+                    json=json_data,
+                    timeout=120.0
+                ) as response:
                     response.raise_for_status()
                     async for line in response.aiter_lines():
                         if line and ("[DONE]" not in line):
+                            # Assuming the response starts with some prefix, adjust accordingly
+                            try:
+                                data = json.loads(line[5:]).get("data", {})
+                                content = data.get("text", "")
+                                yield f"data: {json.dumps(create_chat_completion_data(content, request.model))}\n\n"
+                            except json.JSONDecodeError as e:
+                                logger.error(f"JSON decode error: {e}")
+                    # Indicate the end of the stream
                     yield f"data: {json.dumps(create_chat_completion_data('', request.model, 'stop'))}\n\n"
                     yield "data: [DONE]\n\n"
             except httpx.HTTPStatusError as e:
                 logger.error(f"HTTP error occurred: {e}")
                 raise HTTPException(status_code=e.response.status_code, detail=str(e))
             except httpx.RequestError as e:
+                logger.error(f"Request error occurred: {e}")
                 raise HTTPException(status_code=500, detail=str(e))

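Aside (not part of the diff): when stream=True the route returns the generator above wrapped in a StreamingResponse (the wrapping call itself sits in a collapsed hunk). Each event is a "data: {...}" line, terminated by "data: [DONE]". A client-side sketch, with the base URL and secret assumed as in the earlier examples:

import json

import httpx

with httpx.stream(
    "POST",
    "http://localhost:7860/hf/v1/chat/completions",  # hypothetical address
    headers={"Authorization": "Bearer 786"},
    json={"model": "gpt-4o", "messages": [{"role": "user", "content": "Hi"}], "stream": True},
    timeout=120.0,
) as response:
    for line in response.iter_lines():
        if not line.startswith("data: "):
            continue
        payload = line[6:]
        if payload.strip() == "[DONE]":
            break
        delta = json.loads(payload)["choices"][0]["delta"].get("content") or ""
        print(delta, end="", flush=True)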
     if request.stream:
@@ -225,11 +201,10 @@ async def chat_completions(
         full_response = ""
         async for chunk in generate():
             if chunk.startswith("data: ") and not chunk[6:].startswith("[DONE]"):
                 data = json.loads(chunk[6:])
                 if data["choices"][0]["delta"].get("content"):
                     full_response += data["choices"][0]["delta"]["content"]
+
         return {
             "id": f"chatcmpl-{uuid.uuid4()}",
             "object": "chat.completion",
@@ -245,7 +220,6 @@ async def chat_completions(
             "usage": None,
         }

+# Remove the Uvicorn run block for production deployment
+# if __name__ == "__main__":
+#     uvicorn.run(app, host="0.0.0.0", port=7860)
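Aside (not part of the diff): with the run block commented out, the commit assumes the deployment environment starts the server itself. For local testing it can still be launched programmatically, mirroring the commented-out lines:

import uvicorn

if __name__ == "__main__":
    # "main:app" assumes this file is saved as main.py
    uvicorn.run("main:app", host="0.0.0.0", port=7860)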