# flare/llm_request.py
# (File-viewer page chrome removed: contributor avatar caption, commit id,
#  raw/history/blame links, file size "1.07 kB".)
import os
import requests
from log import log
def request_spark_model(spark_url, project_name, user_input, system_prompt, chat_history):
    """Send a generation request to the Spark microservice and return its answer.

    Parameters
    ----------
    spark_url : str
        Base URL of the Spark service; ``/generate`` is appended to it.
    project_name : str
        Project identifier forwarded to the service.
    user_input : str
        The end user's message.
    system_prompt : str
        System prompt forwarded along with the request.
    chat_history : object
        Prior conversation context, sent as the ``context`` field of the
        JSON payload (shape is whatever the Spark service expects —
        not constrained here).

    Returns
    -------
    The ``model_answer`` field of the service's JSON response (``None``
    when that key is absent), or a fixed Turkish fallback error string
    when any step fails.
    """
    try:
        # Token from Flare's own environment (comment translated from Turkish).
        hf_token = os.getenv("HF_TOKEN")
        if not hf_token:
            # Specific exception type instead of bare Exception; it is
            # caught by this function's own boundary handler below either way.
            raise RuntimeError("HF_TOKEN ortam değişkeni tanımlı değil.")
        headers = {
            "Authorization": f"Bearer {hf_token}",
            "Content-Type": "application/json",
        }
        response = requests.post(
            f"{spark_url}/generate",
            json={
                "project_name": project_name,
                "user_input": user_input,
                "system_prompt": system_prompt,
                "context": chat_history,
            },
            headers=headers,
            timeout=60,  # seconds; prevents an unbounded hang on a dead service
        )
        response.raise_for_status()  # surface HTTP 4xx/5xx as exceptions
        log("✅ Spark cevabı başarıyla alındı.")
        # NOTE(review): .get() returns None when "model_answer" is missing —
        # confirm callers handle a None answer.
        return response.json().get("model_answer")
    except Exception as e:
        # Boundary handler: log the failure and return a fixed user-facing
        # fallback message instead of propagating the exception to callers.
        log(f"❌ Spark microservice hatası: {e}")
        return "Hata: Model cevabı alınamadı."