Update api/utils.py
api/utils.py  (+42 -8)
@@ -1,6 +1,6 @@
 from datetime import datetime
 import json
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, List
 
 import httpx
 from api.config import (
@@ -13,7 +13,7 @@ from api.config import (
     MODEL_REFERERS
 )
 from fastapi import HTTPException
-from api.models import ChatRequest
+from api.models import ChatRequest, Message
 
 from api.logger import setup_logger
 
@@ -31,8 +31,11 @@ async def process_streaming_response(request: ChatRequest):
     dynamic_headers = headers.copy()
     dynamic_headers['Referer'] = referer_url
 
+    # Convert Message objects to dictionaries
+    messages = [msg.dict() for msg in request.messages]
+
     json_data = {
-        "messages":
+        "messages": messages,  # Pass serialized messages
         "previewToken": None,
         "userId": None,
         "codeModelMode": True,
@@ -63,8 +66,16 @@ async def process_streaming_response(request: ChatRequest):
                 timeout=100,
             ) as response:
                 response.raise_for_status()
-                async for
-
+                async for line in response.aiter_lines():
+                    timestamp = int(datetime.now().timestamp())
+                    if line:
+                        content = line
+                        if content.startswith("$@$v=undefined-rv1$@$"):
+                            content = content[21:]
+                        # Since we've removed custom processing, directly yield the content
+                        yield f"data: {content}\n\n"
+
+                yield f"data: [DONE]\n\n"
         except httpx.HTTPStatusError as e:
             logger.error(f"HTTP error occurred: {e}")
             raise HTTPException(status_code=e.response.status_code, detail=str(e))
@@ -72,6 +83,7 @@ async def process_streaming_response(request: ChatRequest):
             logger.error(f"Error occurred during request: {e}")
             raise HTTPException(status_code=500, detail=str(e))
 
+
 async def process_non_streaming_response(request: ChatRequest):
     agent_mode = AGENT_MODE.get(request.model, {})
     trending_agent_mode = TRENDING_AGENT_MODE.get(request.model, {})
@@ -82,8 +94,11 @@ async def process_non_streaming_response(request: ChatRequest):
     dynamic_headers = headers.copy()
     dynamic_headers['Referer'] = referer_url
 
+    # Convert Message objects to dictionaries
+    messages = [msg.dict() for msg in request.messages]
+
     json_data = {
-        "messages":
+        "messages": messages,  # Pass serialized messages
         "previewToken": None,
         "userId": None,
         "codeModelMode": True,
@@ -103,7 +118,7 @@ async def process_non_streaming_response(request: ChatRequest):
         "mobileClient": False,
         "userSelectedModel": MODEL_MAPPING.get(request.model, request.model),
     }
-
+    full_response = ""
     async with httpx.AsyncClient() as client:
         try:
             response = await client.post(
@@ -113,10 +128,29 @@ async def process_non_streaming_response(request: ChatRequest):
                 timeout=100,
             )
             response.raise_for_status()
-
+            full_response = response.text  # Get the response text
         except httpx.HTTPStatusError as e:
            logger.error(f"HTTP error occurred: {e}")
            raise HTTPException(status_code=e.response.status_code, detail=str(e))
         except httpx.RequestError as e:
             logger.error(f"Error occurred during request: {e}")
             raise HTTPException(status_code=500, detail=str(e))
+
+    if full_response.startswith("$@$v=undefined-rv1$@$"):
+        full_response = full_response[21:]
+
+    # Return the response as-is without custom processing
+    return {
+        "id": f"chatcmpl-{uuid.uuid4()}",
+        "object": "chat.completion",
+        "created": int(datetime.now().timestamp()),
+        "model": request.model,
+        "choices": [
+            {
+                "index": 0,
+                "message": {"role": "assistant", "content": full_response},
+                "finish_reason": "stop",
+            }
+        ],
+        "usage": None,
+    }
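For context on the serialization step added to both functions: request.messages holds Pydantic Message models, which httpx cannot JSON-encode directly, so the diff converts them to plain dicts before building json_data. A minimal sketch, assuming Message carries role/content fields (the real definitions live in api/models.py and are not part of this diff):

    # Assumed shapes -- the actual Message/ChatRequest fields are defined in
    # api/models.py, outside this diff.
    from typing import List
    from pydantic import BaseModel

    class Message(BaseModel):
        role: str
        content: str

    class ChatRequest(BaseModel):
        model: str
        messages: List[Message]

    request = ChatRequest(
        model="example-model",
        messages=[Message(role="user", content="Hello")],
    )

    # The diff's conversion: Pydantic models -> plain dicts that httpx can JSON-encode.
    messages = [msg.dict() for msg in request.messages]          # Pydantic v1 API, as used in the diff
    # messages = [msg.model_dump() for msg in request.messages]  # Pydantic v2 equivalent
    print(messages)  # [{'role': 'user', 'content': 'Hello'}]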
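The repeated [21:] slice in both functions matches the length of the upstream sentinel: "$@$v=undefined-rv1$@$" is exactly 21 characters, so the slice strips only that prefix. A small check, with str.removeprefix shown as an equivalent spelling on Python 3.9+:

    SENTINEL = "$@$v=undefined-rv1$@$"
    assert len(SENTINEL) == 21

    content = SENTINEL + "Hello"
    print(content[21:])                    # "Hello" -- the slice used in the diff
    print(content.removeprefix(SENTINEL))  # "Hello" -- equivalent on Python 3.9+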
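A hypothetical sketch of how the two helpers might be exposed as an endpoint; the project's real route lives outside this diff, and the path and the stream flag shown here are assumptions. The streaming generator already yields complete SSE "data: ..." frames ending with "data: [DONE]", so it can be handed straight to a StreamingResponse, while the non-streaming helper returns an OpenAI-style chat.completion dict that FastAPI serializes as JSON.

    # Hypothetical wiring -- route path and the `stream` attribute are assumptions.
    from fastapi import FastAPI
    from fastapi.responses import StreamingResponse

    from api.models import ChatRequest
    from api.utils import process_non_streaming_response, process_streaming_response

    app = FastAPI()

    @app.post("/v1/chat/completions")
    async def chat_completions(request: ChatRequest):
        if getattr(request, "stream", False):
            # Chunks are already "data: ...\n\n" SSE frames, so pass them through as-is.
            return StreamingResponse(
                process_streaming_response(request),
                media_type="text/event-stream",
            )
        # Plain JSON body shaped like an OpenAI chat.completion object.
        return await process_non_streaming_response(request)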