gemma / app.py
luck210's picture
Update app.py
3197d99 verified
raw
history blame
622 Bytes
from fastapi import FastAPI
import os
import ollama
# FastAPI application instance that the route decorators below attach to.
app = FastAPI()
# Load the model (check whether Hugging Face Spaces is ready)
# NOTE(review): this looks like a Hugging Face Hub repo id, not an Ollama
# model tag — confirm `ollama.chat` accepts this name.
MODEL_NAME = "allenai/WildLlama-7b-user-assistant"
# Gate for Ollama calls: true only when the HF_SPACE env var is the
# string "true" (case-insensitive); defaults to false when unset.
OLLAMA_READY = os.getenv("HF_SPACE", "false").lower() == "true"
@app.get("/")
def home():
    """Health-check endpoint: reports API liveness and model readiness."""
    status = {"message": "API is running!", "model_ready": OLLAMA_READY}
    return status
@app.post("/chat")
def chat(prompt: str):
    """Forward *prompt* to the Ollama model and return its reply.

    Returns the model's message dict under the "response" key, or an
    error dict when Ollama is not enabled in this environment.
    """
    # Guard clause: skip the model call entirely when Ollama is disabled.
    if not OLLAMA_READY:
        return {"error": "Ollama is not available on HF Spaces"}
    conversation = [{"role": "user", "content": prompt}]
    result = ollama.chat(model=MODEL_NAME, messages=conversation)
    # NOTE(review): this returns the whole message dict (role + content),
    # not just the text — confirm callers expect that shape.
    return {"response": result["message"]}