# FastAPI server exposing an OpenAI-compatible chat-completions API backed by DuckAI.
# NOTE(review): the original first lines were Hugging Face Spaces page chrome
# ("Spaces:" / "Sleeping" / "Sleeping") captured by extraction, not program text.
import time

import uvicorn
from duckai import DuckAI
from fastapi import FastAPI, HTTPException, Request
from pydantic import BaseModel

app = FastAPI()

# Base path prefix for routes; currently unused by the handlers below.
API_PREFIX = "/"
# Middleware for logging request time.
# NOTE(review): nothing registers this on the app — it presumably lost its
# `@app.middleware("http")` decorator during extraction; confirm and reattach.
async def log_process_time(request: Request, call_next):
    """Log method, status code, path, and wall-clock duration of a request.

    Args:
        request: Incoming request; only ``method`` and ``url.path`` are read.
        call_next: Downstream handler invoked to produce the response.

    Returns:
        The response from ``call_next``, unchanged.
    """
    # perf_counter is monotonic, so the measured interval cannot go negative
    # if the system clock is adjusted mid-request (time.time() could).
    start = time.perf_counter()
    response = await call_next(request)
    elapsed_ms = (time.perf_counter() - start) * 1000
    print(f"{request.method} {response.status_code} {request.url.path} {elapsed_ms:.2f} ms")
    return response
# Request body model for the chat-completions endpoint (OpenAI-style payload).
class ChatCompletionRequest(BaseModel):
    # Model identifier, e.g. "gpt-4o-mini"; forwarded verbatim to DuckAI.
    model: str
    # Conversation messages; each dict is expected to carry a "content" key
    # (and conventionally a "role") — only "content" is read downstream.
    messages: list[dict]
# NOTE(review): no route decorator is attached — presumably `@app.get("/")`
# was lost during extraction; confirm and restore.
async def root():
    """Landing handler reporting that the API server is running."""
    return {"message": "API server running"}
# NOTE(review): no route decorator is attached — a `@app.get(...)` (path
# unknown from here, likely "/ping") was presumably lost during extraction.
async def ping():
    """Liveness probe; always answers with ``pong``."""
    return {"message": "pong"}
# NOTE(review): no route decorator is attached — presumably an
# `@app.get(...)` (conventionally "/v1/models") was lost during extraction.
async def get_models():
    """Return the static model catalog in OpenAI ``/v1/models`` list format."""
    return {
        "object": "list",
        "data": [
            {"id": "gpt-4o-mini", "object": "model", "owned_by": "ddg"},
            {"id": "claude-3-haiku", "object": "model", "owned_by": "ddg"},
            {"id": "llama-3.1-70b", "object": "model", "owned_by": "ddg"},
            {"id": "mixtral-8x7b", "object": "model", "owned_by": "ddg"},
            {"id": "o3-mini", "object": "model", "owned_by": "ddg"},
        ],
    }
# NOTE(review): no route decorator is attached — presumably an
# `@app.post(...)` (conventionally "/v1/chat/completions") was lost
# during extraction; confirm and restore.
async def chat_completions(request: ChatCompletionRequest):
    """Proxy a chat-completion request to DuckAI.

    Args:
        request: Parsed OpenAI-style payload (``model`` + ``messages``).

    Returns:
        An OpenAI-compatible completion dict (see create_complete_response).

    Raises:
        HTTPException: 500 with the underlying error text on any failure.
    """
    try:
        # Flatten every message's content into one prompt string;
        # roles are intentionally ignored — only "content" is used.
        content = " ".join(msg.get("content", "") for msg in request.messages)
        duck = DuckAI()
        results = duck.chat(content, model=request.model)
        return create_complete_response(results, request.model)
    except Exception as e:
        # Chain the cause so the original traceback is preserved in logs.
        raise HTTPException(status_code=500, detail=str(e)) from e
def create_complete_response(text: str, model: str) -> dict:
    """Create a complete non-streaming response.

    Wraps *text* in the OpenAI ``chat.completion`` envelope.

    Args:
        text: Assistant reply to embed as the single choice.
        model: Model identifier echoed back to the client.

    Returns:
        An OpenAI-compatible completion dict with one choice.
    """
    return {
        # Static placeholder id; token usage is not tracked, so zeros are
        # reported deliberately rather than estimated.
        "id": "chatcmpl-123",
        "object": "chat.completion",
        "created": int(time.time()),
        "model": model,
        "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0},
        "choices": [
            {
                "message": {"content": text, "role": "assistant"},
                "index": 0,
                "finish_reason": "stop",
            },
        ],
    }
if __name__ == "__main__":
    # reload=True is a development convenience; disable it in production.
    # Port 7860 is the conventional Hugging Face Spaces port.
    uvicorn.run("app:app", host="0.0.0.0", port=7860, reload=True)