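"""FastAPI backend: routes chat requests to Gemini or a local LLM and logs each exchange to Supabase."""
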
from fastapi import FastAPI, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from typing import List
from supabase import create_client, Client
import os
from dotenv import load_dotenv

from llm import LLMPipeline
from chat import GeminiChat

load_dotenv()

app = FastAPI()

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, replace with your frontend URL
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

@app.get("/health")
async def health_check():
    """Health check endpoint"""
    return {"status": "healthy", "version": "1.0.0"}

# Initialize Supabase client
supabase_url = os.getenv("SUPABASE_URL")
supabase_key = os.getenv("SUPABASE_SERVICE_KEY")

if not supabase_url or not supabase_key:
    raise ValueError("SUPABASE_URL and SUPABASE_SERVICE_KEY environment variables must be set")

supabase: Client = create_client(supabase_url, supabase_key)

# Initialize AI models
llm = LLMPipeline()
gemini = GeminiChat()

class Message(BaseModel):
    role: str
    content: str

class ChatRequest(BaseModel):
    messages: List[Message]
    use_gemini: bool = True
    temperature: float = 0.7

@app.post("/api/chat")
async def chat(request: ChatRequest):
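    """Route the conversation to Gemini or the local LLM, persist it, and return the reply."""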
    try:
        if request.use_gemini:
            # Use Gemini for interactive chat
            response = await gemini.chat(
                [{"role": m.role, "content": m.content} for m in request.messages],
                temperature=request.temperature
            )
        else:
            # Use local LLM for specific tasks
            last_message = request.messages[-1].content
            response = await llm.generate(last_message)
        
        # Store the exchange in Supabase (synchronous client call)
        supabase.table("chat_history").insert({
            # model_dump() is the Pydantic v2 API; use m.dict() if you are on Pydantic v1
            "messages": [m.model_dump() for m in request.messages],
            "response": response,
            "model": "gemini" if request.use_gemini else "local"
        }).execute()
        
        return {"response": response}
    
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)
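
# Example request (assumes the server is running locally on port 7860; values are illustrative):
#   curl -X POST http://localhost:7860/api/chat \
#     -H "Content-Type: application/json" \
#     -d '{"messages": [{"role": "user", "content": "Hello"}], "use_gemini": true}'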