Update app.py
app.py
CHANGED
@@ -102,16 +102,27 @@ def retrieve_chunks(question, index, embed_model, text_chunks, k=3):
     return [text_chunks[i] for i in I[0]]
 
 # ---------------- Groq Answer Generator ----------------
-def generate_answer_with_groq(question, context):
+def generate_answer_with_groq(question, context, query_type="general"):
     url = "https://api.groq.com/openai/v1/chat/completions"
     api_key = os.getenv("GROQ_API_KEY")
     headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
-
+
+    system_prompt = (
+        "You are ToyBot, a friendly WhatsApp assistant for ToyShop. "
+        "You help customers with order status, FAQs, and product return policies. "
+        "Be polite, clear, and concise."
+    )
+
+    if query_type == "faq":
+        user_prompt = f"Customer asked FAQ or product policy question:\n'{question}'\n\nRelevant info:\n{context}"
+    else:
+        user_prompt = f"Customer asked:\n'{question}'\n\nRelevant info:\n{context}"
+
     payload = {
         "model": "llama3-8b-8192",
         "messages": [
-            {"role": "system", "content":
-            {"role": "user", "content":
+            {"role": "system", "content": system_prompt},
+            {"role": "user", "content": user_prompt},
         ],
         "temperature": 0.5,
         "max_tokens": 300,
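Note on the Groq call: the hunk above only builds the headers and payload; how the request is actually sent and how the reply is parsed are not part of this diff. A minimal sketch, assuming the requests library and Groq's OpenAI-compatible response shape (_call_groq is a hypothetical helper, not code from app.py):

# Hypothetical helper; app.py's real request/parse code is not shown in the diff.
import requests

def _call_groq(url, headers, payload):
    # POST the chat-completion payload; on an OpenAI-compatible endpoint the
    # generated text sits at choices[0].message.content.
    resp = requests.post(url, headers=headers, json=payload, timeout=30)
    resp.raise_for_status()
    data = resp.json()
    return data["choices"][0]["message"]["content"].strip()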
@@ -167,10 +178,11 @@ def load_orders():
     return {}
 
 def extract_order_id(text):
-    pattern = r"\
+    pattern = r"(order_id\s+\d+)"
     match = re.search(pattern, text, re.IGNORECASE)
     if match:
-        return match.group(
+        return match.group(1).lower()
     return None
 
 def format_order_response(order_id, order_data):
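For reference, the updated extract_order_id behaves like this (illustrative run; the lowercased "order_id <number>" string is what gets looked up in orders, so the keys returned by load_orders() need to use the same form):

import re

def extract_order_id(text):
    # Same logic as the updated function above.
    pattern = r"(order_id\s+\d+)"
    match = re.search(pattern, text, re.IGNORECASE)
    if match:
        # Lowercase so the dict lookup is case-insensitive.
        return match.group(1).lower()
    return None

print(extract_order_id("Hi, where is my Order_ID 1042?"))  # -> "order_id 1042"
print(extract_order_id("Do you sell wooden puzzles?"))     # -> None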
@@ -216,9 +228,8 @@ def process_messages_loop():
     embeddings = embed_model.encode(text_chunks)
     index = faiss.IndexFlatL2(embeddings.shape[1])
     index.add(embeddings)
-
-    orders = load_orders()
-
+
+    orders = load_orders()
     seen_sids = set()
 
     while True:
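retrieve_chunks itself is mostly outside the changed hunks; only its signature (k=3) and final return line are visible. A plausible body, assuming embed_model is the same sentence-embedding model used to build the IndexFlatL2 above, would be:

# Assumed body: only the signature and the final return line appear in the diff.
import numpy as np

def retrieve_chunks(question, index, embed_model, text_chunks, k=3):
    # Embed the question with the same model used for the chunks, then take the
    # k nearest neighbours from the FAISS L2 index.
    query_vec = embed_model.encode([question])
    D, I = index.search(np.asarray(query_vec, dtype="float32"), k)
    return [text_chunks[i] for i in I[0]]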
@@ -230,14 +241,23 @@ def process_messages_loop():
             message = fetch_latest_incoming_message(twilio_client, conversation_sid)
             if message and message["sid"] not in seen_sids:
                 seen_sids.add(message["sid"])
-                question = message["body"]
+                question = message["body"].strip()
 
+                # Check for order ID in question
                 order_id = extract_order_id(question)
                 if order_id and order_id in orders:
                     answer = format_order_response(order_id, orders[order_id])
                 else:
+                    # Retrieve relevant KB chunks
                    chunks = retrieve_chunks(question, index, embed_model, text_chunks)
-
+                    context = "\n\n".join(chunks).strip()
+
+                    if context:
+                        # Treat as FAQ or policy query
+                        answer = generate_answer_with_groq(question, context, query_type="faq")
+                    else:
+                        # Fallback: general query without context
+                        answer = generate_answer_with_groq(question, "", query_type="general")
 
                 send_twilio_message(twilio_client, conversation_sid, answer)
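Taken together, the routing added in the last hunk reduces to the small decision below (route_question is a hypothetical wrapper for illustration; every function it calls is one named in the diff):

# Hypothetical wrapper that condenses the new message-routing logic.
def route_question(question, orders, index, embed_model, text_chunks):
    order_id = extract_order_id(question)
    if order_id and order_id in orders:
        # Known order: canned response from the orders data, no LLM call needed.
        return format_order_response(order_id, orders[order_id])
    # Otherwise fall back to retrieval + Groq; use the "faq" prompt only when retrieval found context.
    chunks = retrieve_chunks(question, index, embed_model, text_chunks)
    context = "\n\n".join(chunks).strip()
    query_type = "faq" if context else "general"
    return generate_answer_with_groq(question, context, query_type=query_type)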