# NOTE(review): the three lines that were here ("Spaces:", "Sleeping",
# "Sleeping") are Hugging Face Spaces page-status residue from the paste,
# not source code; commented out so the module parses.
# Spaces:
# Sleeping
# Sleeping
# Standard library.
import os

# Third-party.
import httpx
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from pydantic import BaseModel

# Single FastAPI application instance; the route handlers below register on it.
app = FastAPI()
class QueryRequest(BaseModel):
    """Request body for the question endpoint."""

    # Natural-language question to forward to the chosen model.
    query: str
    # Which backend to route to, e.g. "openai", "anthropic", "gemini".
    provider: str
@app.get("/")
def read_root():
    """Health-check endpoint confirming the service is running.

    NOTE(review): no route decorator was present in the pasted source —
    likely lost in extraction; restored here so the handler is reachable.
    Confirm the intended path.
    """
    return {"message": "Multi-Model Selector is running"}
@app.post("/ask")
async def ask_question(request: QueryRequest):
    """Dispatch a query to the selected model provider.

    Args:
        request: Parsed body carrying the user's ``query`` and the
            ``provider`` name ("openai", "anthropic", or "gemini").

    Returns:
        The provider's response dict on success, or a ``JSONResponse``
        error payload (400 for an unknown provider, 500 for unexpected
        failures).

    NOTE(review): no route decorator was present in the pasted source —
    likely lost in extraction; restored here so the handler is reachable.
    Confirm the intended path.
    """
    query = request.query
    # Normalize so "OpenAI", "OPENAI", etc. all match.
    provider = request.provider.lower()
    try:
        if provider == "openai":
            return await ask_openai(query)
        elif provider == "anthropic":
            return {"error": "Anthropic support not implemented yet"}
        elif provider == "gemini":
            return {"error": "Gemini support not implemented yet"}
        else:
            return JSONResponse(content={"error": f"Unknown provider: {provider}"}, status_code=400)
    except Exception as e:
        # Last-resort guard so unexpected failures surface as a JSON 500
        # instead of an unhandled exception.
        return JSONResponse(content={"error": str(e)}, status_code=500)
async def ask_openai(query: str):
    """Send *query* to OpenAI's chat-completions API and return the answer.

    Reads the API key from the ``OPENAI_API_KEY`` environment variable.

    Args:
        query: The user's question, sent as a single user message.

    Returns:
        ``{"response": <answer text>}`` on success; otherwise a
        ``JSONResponse`` (missing key) or an ``{"error": ...}`` dict
        describing the failure.
    """
    openai_api_key = os.getenv("OPENAI_API_KEY")
    if not openai_api_key:
        # Fixed: previously returned HTTP 200 with an error body. A missing
        # server-side credential is a server error, so report 500.
        return JSONResponse(content={"error": "API key not provided."}, status_code=500)
    headers = {
        "Authorization": f"Bearer {openai_api_key}",
        "Content-Type": "application/json"
    }
    payload = {
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": query}]
    }
    try:
        # Explicit timeout so a stalled upstream call cannot hang the request
        # handler; generous because LLM completions can be slow.
        async with httpx.AsyncClient(timeout=60.0) as client:
            response = await client.post("https://api.openai.com/v1/chat/completions", headers=headers, json=payload)
            response.raise_for_status()
            answer = response.json()['choices'][0]['message']['content']
            return {"response": answer}
    except httpx.HTTPStatusError as e:
        return {"error": f"HTTP Status Error: {e.response.status_code}, {e.response.text}"}
    except (KeyError, IndexError):
        # Defensive: the API replied 2xx but without the expected
        # choices/message/content structure.
        return {"error": "Unexpected response format from OpenAI."}
    except Exception as e:
        return {"error": f"An error occurred: {str(e)}"}