# Source: ameliakris — "Update Dockerfile and improve error handling" (commit 613c8f7)
from fastapi import FastAPI, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from typing import List, Dict, Optional
from supabase import create_client, Client
import os
from dotenv import load_dotenv
from llm import LLMPipeline
from chat import GeminiChat
# Load environment variables from a local .env file (SUPABASE_* keys below).
load_dotenv()

# FastAPI application instance; route handlers are registered on it below.
app = FastAPI()

# CORS middleware: wide-open policy for development.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, replace with your frontend URL
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
@app.get("/health")
async def health_check():
    """Liveness probe: report service status and API version."""
    payload = {"status": "healthy", "version": "1.0.0"}
    return payload
# Initialize Supabase client from environment; fail fast at import time
# rather than on the first request if configuration is missing.
supabase_url = os.getenv("SUPABASE_URL")
supabase_key = os.getenv("SUPABASE_SERVICE_KEY")
if not supabase_url or not supabase_key:
    raise ValueError("Supabase environment variables not set")
supabase: Client = create_client(supabase_url, supabase_key)

# Initialize AI models once at startup and share across requests.
# llm: local pipeline used for single-turn generation (see /api/chat).
# gemini: multi-turn chat backend used when ChatRequest.use_gemini is set.
llm = LLMPipeline()
gemini = GeminiChat()
class Message(BaseModel):
    """A single turn in a chat conversation."""

    # Speaker identifier; passed through verbatim to the model backends
    # (presumably "user"/"assistant" — not validated here).
    role: str
    # Text content of the turn.
    content: str
class ChatRequest(BaseModel):
    """Request body for POST /api/chat."""

    # Full conversation history, oldest first; the local-LLM path uses
    # only the last entry.
    messages: List[Message]
    # True → route to Gemini chat; False → route to the local LLM pipeline.
    use_gemini: bool = True
    # Sampling temperature, forwarded to Gemini only.
    temperature: float = 0.7
@app.post("/api/chat")
async def chat(request: ChatRequest):
    """Generate a chat completion and persist the exchange.

    Routes the full conversation to Gemini when ``use_gemini`` is set;
    otherwise sends only the latest message to the local LLM pipeline.
    The exchange is then stored in the Supabase ``chat_history`` table.

    Raises:
        HTTPException: 400 if ``messages`` is empty; 500 on any
            downstream model or database failure.
    """
    # Guard: request.messages[-1] below raises IndexError on an empty
    # list, which would otherwise surface as an opaque 500.
    if not request.messages:
        raise HTTPException(status_code=400, detail="messages must not be empty")
    try:
        if request.use_gemini:
            # Use Gemini for interactive chat; it receives the whole
            # conversation as plain role/content dicts.
            response = await gemini.chat(
                [{"role": m.role, "content": m.content} for m in request.messages],
                temperature=request.temperature
            )
        else:
            # Use local LLM for specific tasks; it is single-turn, so
            # only the most recent message is sent.
            last_message = request.messages[-1].content
            response = await llm.generate(last_message)
        # Store chat history in Supabase
        supabase.table("chat_history").insert({
            "messages": [m.dict() for m in request.messages],
            "response": response,
            "model": "gemini" if request.use_gemini else "local"
        }).execute()
        return {"response": response}
    except HTTPException:
        # Don't re-wrap deliberate HTTP errors into a generic 500.
        raise
    except Exception as e:
        # Boundary handler: surface any backend failure as a 500.
        # NOTE(review): str(e) may leak internal details to clients —
        # consider a generic message plus server-side logging in production.
        raise HTTPException(status_code=500, detail=str(e)) from e
# Script entry point: run the ASGI app with uvicorn on all interfaces,
# port 7860 (the conventional HuggingFace Spaces port — TODO confirm).
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)