fixed streaming issue
main.py
CHANGED
@@ -1,3 +1,30 @@
+from fastapi import FastAPI, Request
+from fastapi.responses import StreamingResponse, JSONResponse
+from pydantic import BaseModel
+import requests
+import time
+import json
+from typing import List, Optional
+from models import AVAILABLE_MODELS
+
+app = FastAPI()
+
+def unix_id():
+    return str(int(time.time() * 1000))
+
+class Message(BaseModel):
+    role: str
+    content: str
+
+class ChatRequest(BaseModel):
+    messages: List[Message]
+    model: str
+    stream: Optional[bool] = False
+
+@app.get("/v1/models")
+async def list_models():
+    return {"object": "list", "data": AVAILABLE_MODELS}
+
 @app.post("/v1/chat/completions")
 async def chat_completion(request: ChatRequest):
     headers = {
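Note: the hunk ends right at "headers = {", so the streaming fix inside chat_completion itself is not visible in this diff. As a rough sketch only, building on the imports and models added above, a handler wired for streaming with requests and StreamingResponse usually looks like the following. UPSTREAM_URL and API_KEY are hypothetical placeholders, not values from this commit.

UPSTREAM_URL = "https://example.com/v1/chat/completions"  # placeholder, not from this commit
API_KEY = "sk-..."                                        # placeholder, not from this commit

@app.post("/v1/chat/completions")
async def chat_completion(request: ChatRequest):
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": request.model,
        "messages": [m.dict() for m in request.messages],
        "stream": bool(request.stream),
    }

    if request.stream:
        # stream=True keeps the upstream connection open so iter_lines()
        # can yield each SSE line ("data: {...}") as it arrives; without it,
        # requests buffers the whole body and the client gets one big chunk.
        upstream = requests.post(UPSTREAM_URL, json=payload, headers=headers, stream=True)

        def event_stream():
            for line in upstream.iter_lines(decode_unicode=True):
                if line:
                    # Re-emit each SSE event with its blank-line terminator.
                    yield line + "\n\n"

        return StreamingResponse(event_stream(), media_type="text/event-stream")

    # Non-streaming path: forward the upstream JSON response as-is.
    upstream = requests.post(UPSTREAM_URL, json=payload, headers=headers)
    return JSONResponse(content=upstream.json(), status_code=upstream.status_code)

The detail that typically fixes this bug class is stream=True on the requests call plus iterating with iter_lines(); without both, the proxy collects the entire upstream response before sending anything to the client.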