Update api/utils.py

api/utils.py (+4 -6)
@@ -1,12 +1,12 @@
 # api/utils.py
 
-import json
 from datetime import datetime
-import
+import json
 from typing import Any, Dict, Optional
+import uuid
+import re
 
 import httpx
-from fastapi import HTTPException
 from api.config import (
     MODEL_MAPPING,
     MODEL_ALIASES,
@@ -15,13 +15,13 @@ from api.config import (
     TRENDING_AGENT_MODE,
     BASE_URL
 )
+from fastapi import HTTPException
 from api.models import ChatRequest
 
 from api.logger import setup_logger
 
 logger = setup_logger(__name__)
 
-
 def create_chat_completion_data(
     content: str, model: str, timestamp: int, finish_reason: Optional[str] = None
 ) -> Dict[str, Any]:
@@ -40,7 +40,6 @@ def create_chat_completion_data(
         "usage": None,
     }
 
-
 def message_to_dict(message):
     if isinstance(message.content, str):
         return {"role": message.role, "content": message.content}
@@ -57,7 +56,6 @@ def message_to_dict(message):
     else:
         return {"role": message.role, "content": message.content}
 
-
 async def process_streaming_response(request: ChatRequest):
     # Map the requested model to the actual model used by the API
     model = MODEL_MAPPING.get(request.model, MODEL_ALIASES.get(request.model, "blackboxai"))