# api/routes.py — test24 (updated by Niansuh, commit c7cc975, 3.51 kB)
import json
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import StreamingResponse, JSONResponse
from typing import Optional
from .auth import verify_app_secret
from .models import ChatRequest, ImageResponse
from .utils import strip_model_prefix
from .gizai import GizAI
from .logger import setup_logger
# Module-level logger (named after this module) and the main API router
# onto which all routes below are registered.
logger = setup_logger(__name__)
router = APIRouter()
@router.options("/v1/chat/completions")
@router.options("/api/v1/chat/completions")
async def chat_completions_options():
    """CORS preflight handler for the chat-completions endpoints.

    Fix: the original returned ``Response``, which is never imported in this
    file and would raise ``NameError`` at request time. Use the
    already-imported ``JSONResponse`` instead, matching the GizAI preflight
    handler elsewhere in this file.
    """
    return JSONResponse(
        status_code=200,
        headers={
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Methods": "POST, OPTIONS",
            "Access-Control-Allow-Headers": "Content-Type, Authorization",
        },
    )
@router.get("/v1/models")
@router.get("/api/v1/models")
async def list_models():
    """Return the catalogue of permitted models in OpenAI list format."""
    # NOTE(review): ALLOWED_MODELS is not imported anywhere in this file;
    # presumably it lives in a sibling module — confirm the import upstream.
    catalogue = {"object": "list", "data": ALLOWED_MODELS}
    return catalogue
@router.post("/v1/chat/completions")
@router.post("/api/v1/chat/completions")
async def chat_completions(
    request: ChatRequest, app_secret: str = Depends(verify_app_secret)
):
    """Serve an OpenAI-style chat completion, streamed or buffered.

    Rejects models outside the allow-list with HTTP 400; otherwise
    dispatches on ``request.stream`` to the streaming or non-streaming
    response helper.
    """
    logger.info("Entering chat_completions route")
    logger.info(f"Processing chat completion request for model: {request.model}")
    # NOTE(review): ALLOWED_MODELS, process_streaming_response and
    # process_non_streaming_response are never imported in this file;
    # presumably provided by sibling modules — confirm the imports upstream.
    permitted_ids = [entry["id"] for entry in ALLOWED_MODELS]
    if request.model not in permitted_ids:
        raise HTTPException(
            status_code=400,
            detail=f"Model {request.model} is not allowed. Allowed models are: {', '.join(model['id'] for model in ALLOWED_MODELS)}",
        )
    if not request.stream:
        logger.info("Non-streaming response")
        return await process_non_streaming_response(request)
    logger.info("Streaming response")
    return StreamingResponse(
        process_streaming_response(request), media_type="text/event-stream"
    )
# GizAI Routes — dedicated sub-router served under the /gizai path prefix.
gizai_router = APIRouter(prefix="/gizai", tags=["GizAI"])
@gizai_router.options("/v1/chat/completions")
async def gizai_chat_completions_options():
    """CORS preflight handler for the GizAI chat-completions endpoint."""
    cors_headers = {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "POST, OPTIONS",
        "Access-Control-Allow-Headers": "Content-Type, Authorization",
    }
    return JSONResponse(status_code=200, headers=cors_headers)
@gizai_router.post("/v1/chat/completions")
async def gizai_chat_completions(
    request: ChatRequest, app_secret: str = Depends(verify_app_secret)
):
    """Proxy a chat completion request to the GizAI backend as a stream.

    Raises HTTP 400 for models GizAI does not support and HTTP 500 if the
    backend call fails.
    """
    logger.info("Entering GizAI chat_completions route")
    logger.info(f"Processing chat completion request for model: {request.model}")
    # Resolve any alias/prefix to GizAI's canonical model id before validating.
    resolved = GizAI.get_model(request.model)
    if resolved not in GizAI.models:
        raise HTTPException(
            status_code=400,
            detail=f"Model {request.model} is not supported by GizAI. Supported models are: {', '.join(GizAI.models)}",
        )
    try:
        token_stream = GizAI.create_async_generator(
            model=resolved, messages=request.messages
        )
        return StreamingResponse(token_stream, media_type="application/json")
    except Exception as e:
        logger.error(f"Error in GizAI chat_completions: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
# Include GizAI router in the main router so its /gizai/* routes are served.
router.include_router(gizai_router)
@router.get("/")
@router.get("/healthz")
@router.get("/ready")
@router.get("/alive")
@router.get("/status")
@router.get("/health")
def health_check(request: Request):
    """Liveness/readiness probe: always answers ``{"status": "ok"}``.

    Fixes over the original: it returned ``Response``, which is never
    imported in this file (NameError at request time), and it mixed the
    deprecated Starlette ``@router.route`` decorator with FastAPI's
    ``@router.get``. All probe paths now register plain GET routes and the
    body is serialized by the already-imported ``JSONResponse`` instead of
    a manual ``json.dumps``.
    """
    return JSONResponse(content={"status": "ok"})