Update app.py
app.py CHANGED
@@ -36,10 +36,17 @@ def retrieve_answer(query, threshold=0.65, top_k=1):
     query_embedding = model.encode([query])[0]
     result = index.query(vector=query_embedding.tolist(), top_k=top_k, include_metadata=True)
 
-
+    # لاگ کامل نتیجه Pinecone
+    print("=== Pinecone query result ===")
+    print(json.dumps(result, indent=2, ensure_ascii=False))
+    print("============================")
+
+    if result.get('matches') and len(result['matches']) > 0 and result['matches'][0]['score'] > threshold:
         metadata = result['matches'][0]['metadata']
+        print("Matched answer:", metadata.get('answer'))
         return metadata.get('answer', 'پاسخ یافت نشد')
     else:
+        print("No good match found.")
         return None
 
 def generate_human_response(context_text):
@@ -62,7 +69,8 @@ def generate_human_response(context_text):
             max_tokens=100,
         )
         return response['choices'][0]['message']['content'].strip()
-    except Exception:
+    except Exception as e:
+        print("OpenAI API error:", e)
         return "خطا در پردازش درخواست."
 
 def chat_interface(question):
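
Read together, the first hunk makes retrieve_answer look roughly like the sketch below after this commit. It is assembled from the hunk above and assumes that model (the sentence-transformers encoder), index (the Pinecone index), and json are imported and initialized earlier in app.py, which this diff does not show. One caveat: depending on the pinecone client version, index.query() may return a QueryResponse object rather than a plain dict, in which case json.dumps would raise a serialization error and the result would need converting first (for example with result.to_dict(), where the client provides it).

import json  # needed by the new debug logging; assumed to sit near the top of app.py

def retrieve_answer(query, threshold=0.65, top_k=1):
    # Embed the question and fetch the closest stored entry from Pinecone.
    query_embedding = model.encode([query])[0]
    result = index.query(vector=query_embedding.tolist(), top_k=top_k, include_metadata=True)

    # لاگ کامل نتیجه Pinecone -- debug: dump the full Pinecone query result
    print("=== Pinecone query result ===")
    print(json.dumps(result, indent=2, ensure_ascii=False))
    print("============================")

    # Accept the top match only if its similarity score clears the threshold.
    if result.get('matches') and len(result['matches']) > 0 and result['matches'][0]['score'] > threshold:
        metadata = result['matches'][0]['metadata']
        print("Matched answer:", metadata.get('answer'))
        return metadata.get('answer', 'پاسخ یافت نشد')  # "answer not found"
    else:
        print("No good match found.")
        return None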
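
The second hunk only touches the except branch, so most of generate_human_response is not visible here. The sketch below is a guess at the surrounding shape: the openai.ChatCompletion.create call, the model name, and the prompt construction are assumptions consistent with the pre-1.0 openai Python client implied by response['choices'][0]['message']['content']; only max_tokens=100, the return line, and the new except Exception as e / print come from the diff. The Persian fallback string means "Error processing the request."

import openai  # assumed; openai.api_key is presumably configured elsewhere in app.py

def generate_human_response(context_text):
    try:
        # Everything inside create(...) except max_tokens is an assumption for illustration.
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",                                  # hypothetical model name
            messages=[{"role": "user", "content": context_text}],  # hypothetical prompt
            max_tokens=100,
        )
        return response['choices'][0]['message']['content'].strip()
    except Exception as e:
        # New in this commit: log the underlying API error before returning the fallback.
        print("OpenAI API error:", e)
        return "خطا در پردازش درخواست."  # "Error processing the request."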