Tim Luka Horstmann
committed on
Commit
·
0a03f8b
1
Parent(s):
44afc53
Import json and print
Browse files- llm_server.py +4 -0
llm_server.py
CHANGED
@@ -1,3 +1,4 @@
|
|
|
|
1 |
import time
|
2 |
from fastapi import FastAPI, HTTPException
|
3 |
from fastapi.responses import StreamingResponse, JSONResponse
|
@@ -136,6 +137,9 @@ async def keep_model_warm():
|
|
136 |
# ─── OpenAI-compatible endpoint ─────────────────────────────────────────────
|
137 |
@app.post("/v1/chat/completions")
|
138 |
async def chat(req: dict):
|
|
|
|
|
|
|
139 |
# if the client (Qwen-Agent) asked for a stream, proxy the SSE events:
|
140 |
if req.get("stream", False):
|
141 |
async def event_generator():
|
|
|
1 |
+
import json
|
2 |
import time
|
3 |
from fastapi import FastAPI, HTTPException
|
4 |
from fastapi.responses import StreamingResponse, JSONResponse
|
|
|
137 |
# ─── OpenAI-compatible endpoint ─────────────────────────────────────────────
|
138 |
@app.post("/v1/chat/completions")
|
139 |
async def chat(req: dict):
|
140 |
+
|
141 |
+
print("Request:", req)
|
142 |
+
|
143 |
# if the client (Qwen-Agent) asked for a stream, proxy the SSE events:
|
144 |
if req.get("stream", False):
|
145 |
async def event_generator():
|