flare / llm_request.py
import requests

from log import log


def request_spark_model(spark_url, project_name, user_input, system_prompt, chat_history):
    """Send a generation request to the Spark microservice and return its answer."""
    try:
        # POST the prompt, user input, and prior conversation context to the
        # microservice's /generate endpoint.
        response = requests.post(
            f"{spark_url}/generate",
            json={
                "project_name": project_name,
                "user_input": user_input,
                "system_prompt": system_prompt,
                "context": chat_history,
            },
            timeout=60,
        )
        response.raise_for_status()
        # The service is expected to return a JSON body containing "model_answer".
        return response.json().get("model_answer")
    except Exception as e:
        log(f"❌ Spark microservice error: {e}")
        return "Error: Could not get an answer from the model."