|
from fastapi import FastAPI |
|
import os |
|
import ollama |
|
|
|
app = FastAPI()

# Hugging Face model tag served through the local Ollama daemon.
MODEL_NAME = "allenai/WildLlama-7b-user-assistant"

# True when the HF_SPACE env var is literally "true" (case-insensitive);
# defaults to False when the variable is unset.
# NOTE(review): this reads as "ready when running on HF Spaces", but the
# /chat guard below returns "Ollama is not available on HF Spaces" when this
# is False — the flag or the message appears inverted; confirm intent.
OLLAMA_READY = os.getenv("HF_SPACE", "false").lower() == "true"
|
|
|
@app.get("/")
def home():
    """Liveness probe: report that the API is up and whether the model is ready."""
    payload = {
        "message": "API is running!",
        "model_ready": OLLAMA_READY,
    }
    return payload
|
|
|
@app.post("/chat")
def chat(prompt: str):
    """Send *prompt* to the configured Ollama model and return its reply.

    Returns a ``{"response": ...}`` dict on success, or an ``{"error": ...}``
    dict (HTTP 200, matching the existing contract) when the model is
    unavailable or the backend call fails.

    NOTE(review): with a bare ``str`` annotation FastAPI treats ``prompt`` as
    a *query* parameter on this POST route, not the request body — confirm
    that is intended; a Pydantic body model is the usual shape.
    """
    if not OLLAMA_READY:
        # Keep the established error-dict style rather than raising.
        return {"error": "Ollama is not available on HF Spaces"}

    try:
        response = ollama.chat(
            model=MODEL_NAME,
            messages=[{"role": "user", "content": prompt}],
        )
    except Exception as exc:
        # Previously a backend failure (daemon down, model not pulled)
        # escaped as an unhandled 500; surface it in the same error-dict
        # style the guard above already uses.
        return {"error": f"Ollama request failed: {exc}"}

    return {"response": response["message"]}
|
|
|
|