Spaces:
Sleeping
Sleeping
File size: 1,927 Bytes
75a63b2 f9b91f5 56c26c5 c406c1a 75a63b2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 |
import os
from fastapi import FastAPI, Request
from pydantic import BaseModel
from fastapi.responses import JSONResponse
import httpx
# FastAPI application instance; the route handlers below register themselves on it.
app = FastAPI()
class QueryRequest(BaseModel):
    """Request body for POST /ask: a user question plus the LLM backend to use."""

    query: str
    # Which backend should answer, e.g., "openai", "anthropic", "gemini"
    provider: str
@app.get("/")
def read_root():
    """Health-check endpoint confirming the service is up."""
    status_body = {"message": "Multi-Model Selector is running"}
    return status_body
@app.post("/ask")
async def ask_question(request: QueryRequest):
    """Dispatch the incoming query to the provider named in the request.

    Returns the provider's answer on success, a 400 for an unknown
    provider, or a 500 when dispatching raises unexpectedly.
    """
    provider = request.provider.lower()
    # Providers that are recognized but not wired up yet.
    stubbed = {
        "anthropic": {"error": "Anthropic support not implemented yet"},
        "gemini": {"error": "Gemini support not implemented yet"},
    }
    try:
        if provider == "openai":
            return await ask_openai(request.query)
        if provider in stubbed:
            return stubbed[provider]
        return JSONResponse(content={"error": f"Unknown provider: {provider}"}, status_code=400)
    except Exception as e:
        # Top-level boundary: surface any unexpected failure as a 500.
        return JSONResponse(content={"error": str(e)}, status_code=500)
async def ask_openai(query: str, model: str = "gpt-3.5-turbo"):
    """Send *query* to the OpenAI chat-completions API and return the answer.

    Args:
        query: The user's question, sent as a single "user" role message.
        model: OpenAI model name; defaults to "gpt-3.5-turbo" (previously
            hard-coded, now a backward-compatible parameter).

    Returns:
        ``{"response": <answer>}`` on success, or a ``JSONResponse`` with an
        ``{"error": ...}`` body and a non-2xx status code on failure.
        (Bug fix: error paths previously returned plain dicts, which FastAPI
        served with HTTP 200, masking failures from clients.)
    """
    openai_api_key = os.getenv("OPENAI_API_KEY")
    if not openai_api_key:
        # Configuration error on our side: report as 500, not 200.
        return JSONResponse(content={"error": "API key not provided."}, status_code=500)
    headers = {
        "Authorization": f"Bearer {openai_api_key}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": model,
        "messages": [{"role": "user", "content": query}],
    }
    try:
        async with httpx.AsyncClient() as client:
            response = await client.post(
                "https://api.openai.com/v1/chat/completions",
                headers=headers,
                json=payload,
            )
            response.raise_for_status()
        answer = response.json()['choices'][0]['message']['content']
        return {"response": answer}
    except httpx.HTTPStatusError as e:
        # Propagate the upstream status code instead of masking it as 200.
        return JSONResponse(
            content={"error": f"HTTP Status Error: {e.response.status_code}, {e.response.text}"},
            status_code=e.response.status_code,
        )
    except (KeyError, IndexError):
        # 2xx reply whose body lacks the expected chat-completion shape.
        return JSONResponse(
            content={"error": "Unexpected response format from OpenAI."},
            status_code=502,
        )
    except Exception as e:
        return JSONResponse(content={"error": f"An error occurred: {str(e)}"}, status_code=500)