Update llm_request.py
llm_request.py  +13 -0
@@ -1,8 +1,18 @@
+import os
 import requests
 from log import log
 
 def request_spark_model(spark_url, project_name, user_input, system_prompt, chat_history):
     try:
+        hf_token = os.getenv("HF_TOKEN")  # token from Flare's own environment
+        if not hf_token:
+            raise Exception("HF_TOKEN ortam değişkeni tanımlı değil.")
+
+        headers = {
+            "Authorization": f"Bearer {hf_token}",
+            "Content-Type": "application/json"
+        }
+
         response = requests.post(
             f"{spark_url}/generate",
             json={
@@ -11,10 +21,13 @@ def request_spark_model(spark_url, project_name, user_input, system_prompt, chat
                 "system_prompt": system_prompt,
                 "context": chat_history
             },
+            headers=headers,
             timeout=60
         )
         response.raise_for_status()
+        log("✅ Spark cevabı başarıyla alındı.")
         return response.json().get("model_answer")
+
     except Exception as e:
         log(f"❌ Spark microservice hatası: {e}")
         return "Hata: Model cevabı alınamadı."
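With this change, request_spark_model reads HF_TOKEN from its own environment and sends it as a Bearer token on the POST to {spark_url}/generate; if the variable is missing, the exception is caught, logged, and the Turkish fallback string is returned. A minimal caller sketch, assuming the module is importable as llm_request and using made-up endpoint and prompt values purely for illustration:

import llm_request

# Assumes HF_TOKEN is already set in this process's environment
# (e.g. as a Space secret); otherwise the function logs the error
# and returns the fallback string instead of raising.
answer = llm_request.request_spark_model(
    spark_url="https://example-spark.hf.space",    # hypothetical Spark endpoint
    project_name="demo-project",                   # hypothetical project name
    user_input="Merhaba!",                         # sample user message
    system_prompt="You are a helpful assistant.",  # sample system prompt
    chat_history=[],                               # no previous turns
)
print(answer)  # "model_answer" field of the JSON response on success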