Update app.py
app.py CHANGED
@@ -102,27 +102,16 @@ def retrieve_chunks(question, index, embed_model, text_chunks, k=3):
     return [text_chunks[i] for i in I[0]]
 
 # ---------------- Groq Answer Generator ----------------
-def generate_answer_with_groq(question, context, query_type="general"):
+def generate_answer_with_groq(question, context):
     url = "https://api.groq.com/openai/v1/chat/completions"
     api_key = os.getenv("GROQ_API_KEY")
     headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
-
-    system_prompt = (
-        "You are ToyBot, a friendly WhatsApp assistant for ToyShop. "
-        "You help customers with order status, FAQs, and product return policies. "
-        "Be polite, clear, and concise."
-    )
-
-    if query_type == "faq":
-        user_prompt = f"Customer asked FAQ or product policy question:\n'{question}'\n\nRelevant info:\n{context}"
-    else:
-        user_prompt = f"Customer asked:\n'{question}'\n\nRelevant info:\n{context}"
-
+    prompt = f"Customer asked: '{question}'\n\nHere is the relevant information to help:\n{context}"
     payload = {
         "model": "llama3-8b-8192",
         "messages": [
-            {"role": "system", "content": system_prompt},
-            {"role": "user", "content": user_prompt},
+            {"role": "system", "content": "You are ToyBot, a friendly WhatsApp assistant specialized in toy shop customer service."},
+            {"role": "user", "content": prompt},
         ],
         "temperature": 0.5,
         "max_tokens": 300,
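The rewritten generate_answer_with_groq builds one user prompt and a fixed system prompt instead of branching on query_type. The HTTP request and response parsing sit below this hunk and are not shown in the diff; a minimal sketch of how that part typically looks against Groq's OpenAI-compatible chat completions endpoint (the ask_groq helper name, the timeout, and the error handling are assumptions, not code from app.py):

import os
import requests

def ask_groq(prompt: str) -> str:
    # Hypothetical helper mirroring the payload built in generate_answer_with_groq.
    resp = requests.post(
        "https://api.groq.com/openai/v1/chat/completions",
        headers={
            "Authorization": f"Bearer {os.getenv('GROQ_API_KEY')}",
            "Content-Type": "application/json",
        },
        json={
            "model": "llama3-8b-8192",
            "messages": [
                {"role": "system", "content": "You are ToyBot, a friendly WhatsApp assistant."},
                {"role": "user", "content": prompt},
            ],
            "temperature": 0.5,
            "max_tokens": 300,
        },
        timeout=30,
    )
    resp.raise_for_status()
    # OpenAI-compatible responses carry the reply at choices[0].message.content.
    return resp.json()["choices"][0]["message"]["content"]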
@@ -178,11 +167,11 @@ def load_orders():
     return {}
 
 def extract_order_id(text):
-    pattern
+    # Extract pattern: "order_id <digits>"
+    pattern = r"order_id\s+(\d+)"
     match = re.search(pattern, text, re.IGNORECASE)
     if match:
-        return match.group(1)
-        return match.group(0).upper()
+        return match.group(1)  # just digits as string
     return None
 
 def format_order_response(order_id, order_data):
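The new extract_order_id only recognises a literal "order_id" followed by digits (case-insensitive) and returns the digits as a string, which is what the later "order_id in orders" lookup expects. A quick sketch of that behaviour with made-up customer messages:

import re

pattern = r"order_id\s+(\d+)"

m = re.search(pattern, "Hi, what's the status of order_id 10234?", re.IGNORECASE)
print(m.group(1) if m else None)   # -> "10234"

# A differently phrased ID does not match, so the message falls through
# to the knowledge-base / Groq path instead of the order lookup.
print(re.search(pattern, "Where is order #10234?", re.IGNORECASE))  # -> None

This assumes the keys in the orders data are plain digit strings; IDs written any other way simply skip the order-status branch.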
@@ -228,8 +217,9 @@ def process_messages_loop():
     embeddings = embed_model.encode(text_chunks)
     index = faiss.IndexFlatL2(embeddings.shape[1])
     index.add(embeddings)
-
-    orders = load_orders()
+
+    orders = load_orders()  # Load orders once at start
+
     seen_sids = set()
 
     while True:
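load_orders() is now called once, before the polling loop, instead of inside it. The shape of the data it returns is not visible in this diff; a purely hypothetical example that would satisfy the order lookup and the format_order_response call might be:

# Hypothetical orders dict as returned by load_orders() -- the real schema
# lives elsewhere in app.py / the orders data file.
orders = {
    "10234": {"status": "shipped", "item": "RC Monster Truck", "eta": "2025-01-15"},
    "10235": {"status": "processing", "item": "Wooden Blocks Set"},
}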
@@ -241,23 +231,14 @@
             message = fetch_latest_incoming_message(twilio_client, conversation_sid)
             if message and message["sid"] not in seen_sids:
                 seen_sids.add(message["sid"])
-                question = message["body"]
+                question = message["body"]
 
-                # Check for order ID in question
                 order_id = extract_order_id(question)
                 if order_id and order_id in orders:
                     answer = format_order_response(order_id, orders[order_id])
                 else:
-                    # Retrieve relevant KB chunks
                     chunks = retrieve_chunks(question, index, embed_model, text_chunks)
-                    context = "\n\n".join(chunks)
-
-                    if context:
-                        # Treat as FAQ or policy query
-                        answer = generate_answer_with_groq(question, context, query_type="faq")
-                    else:
-                        # Fallback: general query without context
-                        answer = generate_answer_with_groq(question, "", query_type="general")
+                    answer = generate_answer_with_groq(question, "\n\n".join(chunks))
 
                 send_twilio_message(twilio_client, conversation_sid, answer)
 
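With the context check and the query_type argument gone, the message-handling branch reduces to: try an order lookup first, otherwise join the retrieved chunks and ask Groq. A condensed sketch of the new flow (the answer_message wrapper is illustrative; in app.py this logic stays inline in the polling loop):

def answer_message(question, orders, index, embed_model, text_chunks):
    # Hypothetical condensation of the loop body after this commit.
    order_id = extract_order_id(question)
    if order_id and order_id in orders:
        return format_order_response(order_id, orders[order_id])
    chunks = retrieve_chunks(question, index, embed_model, text_chunks)
    # There is no longer an empty-context fallback; whatever retrieve_chunks
    # returns is joined and sent to Groq as the context.
    return generate_answer_with_groq(question, "\n\n".join(chunks))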