Update inference.py
inference.py  CHANGED  (+15, -0)
@@ -70,3 +70,18 @@ def get_gpt_response(query, user_context=""):
 # ✅ Final callable interface
 def infer(query, options, user_context=""):
     return get_evo_response(query, options, user_context)
+
+# 🧠 Unified chat-style interface for EvoRAG
+def evo_chat_predict(history, query, options):
+    # Use the last few exchanges as context (up to 3 pairs)
+    context = "\n".join(history[-6:]) if history else ""
+
+    evo_ans, evo_score, evo_reason, evo_ctx = get_evo_response(query, options, context)
+
+    return {
+        "answer": evo_ans,
+        "confidence": round(evo_score, 3),
+        "reasoning": evo_reason,
+        "context_used": evo_ctx
+    }
+
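For reference, a minimal usage sketch of the new evo_chat_predict helper. The signature and returned keys come from the diff above; the shape of history (a flat list of alternating user/assistant strings) and the idea that options is a list of candidate answers are assumptions, since get_evo_response is defined elsewhere in the file.

    # Minimal usage sketch of evo_chat_predict (not part of the commit).
    # Assumptions: history is a flat list of alternating user/assistant strings,
    # and options is a list of candidate answers passed through to get_evo_response.
    from inference import evo_chat_predict

    history = [
        "User: What gas do plants absorb during photosynthesis?",
        "Assistant: Carbon dioxide (CO2).",
    ]
    options = ["Oxygen", "Carbon dioxide", "Nitrogen", "Helium"]

    result = evo_chat_predict(history, "Which of these is a greenhouse gas?", options)
    print(result["answer"], result["confidence"])
    print(result["reasoning"])
    print(result["context_used"])

Note the history[-6:] slice in the new function: it bounds the context to the three most recent exchange pairs, so the prompt passed down to get_evo_response stays short even for long chats.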