masadonline committed on
Commit 488b2e6 · verified
1 Parent(s): b10610e

Update app.py

Files changed (1)
  1. app.py +82 -68
app.py CHANGED
@@ -15,12 +15,12 @@ import re
15
  st.set_page_config(page_title="RAG Customer Support Chatbot", layout="wide")
16
 
17
  # --- Default Configurations & File Paths ---
18
- DEFAULT_TWILIO_ACCOUNT_SID_FALLBACK = "" # Fallback if secret "TWILIO_SID" is not found
19
- DEFAULT_TWILIO_AUTH_TOKEN_FALLBACK = "" # Fallback if secret "TWILIO_TOKEN" is not found
20
- DEFAULT_GROQ_API_KEY_FALLBACK = "" # Fallback if secret "GROQ_API_KEY" is not found
21
 
22
  DEFAULT_TWILIO_CONVERSATION_SERVICE_SID = ""
23
- DEFAULT_TWILIO_BOT_WHATSAPP_IDENTITY = st.secrets.get("TWILIO_PHONE_NUMBER", "whatsapp:+14155238886") # Twilio Sandbox default
24
  DEFAULT_EMBEDDING_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
25
  DEFAULT_POLLING_INTERVAL_S = 30
26
  DOCS_FOLDER = "docs/"
@@ -30,19 +30,16 @@ POLICY_PDF_FILE = os.path.join(DOCS_FOLDER, "ProductReturnPolicy.pdf")
30
  FAQ_PDF_FILE = os.path.join(DOCS_FOLDER, "FAQ.pdf")
31
 
32
  # --- Application Secrets Configuration ---
33
- # These are the primary keys fetched from st.secrets as per user request
34
  APP_TWILIO_ACCOUNT_SID = st.secrets.get("TWILIO_ACCOUNT_SID")
35
  APP_TWILIO_AUTH_TOKEN = st.secrets.get("TWILIO_AUTH_TOKEN")
36
  APP_GROQ_API_KEY = st.secrets.get("GROQ_API_KEY")
37
 
38
- # Other secrets with fallback to defaults/sidebar input (if secrets not found)
39
  APP_TWILIO_CONVERSATION_SERVICE_SID_SECRET = st.secrets.get("TWILIO_CONVERSATION_SERVICE_SID")
40
  APP_TWILIO_BOT_WHATSAPP_IDENTITY_SECRET = st.secrets.get("TWILIO_BOT_WHATSAPP_IDENTITY")
41
 
42
 
43
  # --- RAG Processing Utilities ---
44
  def load_json_data(file_path):
45
- """Loads data from a JSON file."""
46
  try:
47
  with open(file_path, 'r', encoding='utf-8') as f:
48
  data = json.load(f)
@@ -58,7 +55,6 @@ def load_json_data(file_path):
58
  return None
59
 
60
  def load_pdf_data(file_path):
61
- """Extracts text from a PDF file, page by page."""
62
  try:
63
  with open(file_path, 'rb') as f:
64
  reader = PyPDF2.PdfReader(f)
@@ -75,7 +71,6 @@ def load_pdf_data(file_path):
75
  return []
76
 
77
  def chunk_text(text_pages, chunk_size=1000, chunk_overlap=200):
78
- """Chunks text from PDF pages into smaller, overlapping pieces."""
79
  full_text = "\n".join(text_pages)
80
  if not full_text.strip():
81
  return []
@@ -93,7 +88,6 @@ def chunk_text(text_pages, chunk_size=1000, chunk_overlap=200):
93
 
94
  @st.cache_resource(show_spinner="Initializing embedding model...")
95
  def initialize_embedding_model(model_name=DEFAULT_EMBEDDING_MODEL_NAME):
96
- """Initializes and returns a SentenceTransformer model."""
97
  try:
98
  model = SentenceTransformer(model_name)
99
  return model
@@ -103,7 +97,6 @@ def initialize_embedding_model(model_name=DEFAULT_EMBEDDING_MODEL_NAME):
103
 
104
  @st.cache_resource(show_spinner="Building FAISS index for PDF documents...")
105
  def create_faiss_index(_text_chunks, _embedding_model):
106
- """Creates a FAISS index from text chunks and an embedding model."""
107
  if not _text_chunks or _embedding_model is None:
108
  st.warning("Cannot create FAISS index: No text chunks or embedding model available.")
109
  return None, []
@@ -127,7 +120,6 @@ def create_faiss_index(_text_chunks, _embedding_model):
127
  return None, []
128
 
129
  def search_faiss_index(index, query_text, embedding_model, indexed_chunks, k=3):
130
- """Searches the FAISS index and returns top_k relevant chunk texts."""
131
  if index is None or embedding_model is None or not query_text:
132
  return []
133
  try:
@@ -146,7 +138,6 @@ def search_faiss_index(index, query_text, embedding_model, indexed_chunks, k=3):
146
  return []
147
 
148
  def get_order_details(order_id, customer_orders_data):
149
- """Retrieves order details for a given order_id."""
150
  if not customer_orders_data:
151
  return "Customer order data is not loaded."
152
  for order in customer_orders_data:
@@ -155,7 +146,6 @@ def get_order_details(order_id, customer_orders_data):
155
  return f"No order found with ID: {order_id}."
156
 
157
  def get_product_info(query, products_data):
158
- """Retrieves product information based on a query."""
159
  if not products_data:
160
  st.warning("Product data is not loaded or is empty in get_product_info.")
161
  return "Product data is not loaded."
@@ -195,11 +185,6 @@ def get_product_info(query, products_data):
195
  def generate_response_groq(_groq_client, query, context, model="llama3-8b-8192",
196
  intent=None, customer_name=None, item_name=None,
197
  shipping_address=None, delivery_date=None, order_id=None, order_status=None):
198
- """
199
- Generates a response using GROQ LLaMA3 API.
200
- If intent is ORDER_STATUS and customer_name, order_id, order_status are provided,
201
- it crafts a more personalized prompt.
202
- """
203
  if not _groq_client:
204
  return "GROQ client not initialized. Please check API key."
205
  if not query:
@@ -209,34 +194,58 @@ def generate_response_groq(_groq_client, query, context, model="llama3-8b-8192",
209
  user_prompt = ""
210
 
211
  if intent == "ORDER_STATUS" and order_id and customer_name and order_status:
212
- system_message = f"You are a friendly and helpful customer support assistant. Your primary goal is to provide a personalized update about order {order_id} for {customer_name}."
213
 
214
- item_description = item_name if item_name else "your item(s)" # Default if item_name is not found
215
 
216
- # Base of the user prompt, asking the LLM to formulate the response
217
- user_prompt = f"The user, {customer_name}, asked the following question: '{query}'.\n"
218
- user_prompt += f"You have the following details for their order {order_id}:\n"
219
- user_prompt += f"- Item(s): {item_description}\n"
220
- user_prompt += f"- Current Status: '{order_status}'\n"
 
221
 
222
  if order_status.lower() == "delivered":
223
- user_prompt += f"- Shipping Address: {shipping_address if shipping_address else 'their address'}\n"
224
- user_prompt += f"- Delivered On: {delivery_date if delivery_date else 'the specified date'}\n"
225
- user_prompt += f"\nPlease formulate a human-like response to {customer_name}. For example: "
226
- user_prompt += f"'Hi {customer_name}, regarding your order {order_id} for {item_description}, it has been \"{order_status}\". "
227
- if shipping_address and delivery_date:
228
- user_prompt += f"It was delivered to {shipping_address} on {delivery_date}.'"
229
- elif shipping_address:
230
- user_prompt += f"It was delivered to {shipping_address}. (Delivery date not specified).'"
231
- elif delivery_date:
232
- user_prompt += f"It was delivered on {delivery_date}. (Shipping address not specified).'"
233
  else:
234
- user_prompt += "(Delivery details not fully specified).'"
235
- else: # Not delivered or other status
236
- user_prompt += f"\nPlease formulate a human-like response to {customer_name}. For example: "
237
- user_prompt += f"'Hi {customer_name}, the current status of your order {order_id} for {item_description} is \"{order_status}\".'"
238
 
239
- user_prompt += f"\n\nFor your reference, the full context originally retrieved from the database was: {context}"
240
 
241
  else: # Default prompt structure for other intents or if details are missing
242
  system_message = "You are a helpful customer support assistant."
@@ -257,9 +266,12 @@ Assistant Answer:
257
  {"role": "system", "content": system_message},
258
  {"role": "user", "content": user_prompt}
259
  ],
260
- model=model, temperature=0.7, max_tokens=1024, top_p=1
261
  )
262
- response = chat_completion.choices[0].message.content
263
  return response
264
  except Exception as e:
265
  st.error(f"Error calling GROQ API: {e}")
@@ -267,7 +279,6 @@ Assistant Answer:
267
 
268
 
269
  def initialize_groq_client(api_key_val):
270
- """Initializes the GROQ client."""
271
  if not api_key_val:
272
  st.warning("GROQ API Key is missing.")
273
  return None
@@ -280,7 +291,6 @@ def initialize_groq_client(api_key_val):
280
 
281
  # --- Twilio Operations ---
282
  def initialize_twilio_client(acc_sid, auth_tkn):
283
- """Initializes the Twilio client."""
284
  if not acc_sid or not auth_tkn:
285
  st.warning("Twilio Account SID or Auth Token is missing.")
286
  return None
@@ -293,7 +303,6 @@ def initialize_twilio_client(acc_sid, auth_tkn):
293
 
294
  def get_new_whatsapp_messages(twilio_client, conversation_service_sid_val, bot_start_time_utc,
295
  processed_message_sids, bot_whatsapp_identity_val):
296
- """Fetches new, unanswered WhatsApp messages from Twilio Conversations."""
297
  if not twilio_client:
298
  st.warning("Twilio client not initialized.")
299
  return []
@@ -336,7 +345,6 @@ def get_new_whatsapp_messages(twilio_client, conversation_service_sid_val, bot_s
336
  return sorted(new_messages_to_process, key=lambda m: m['timestamp_utc'])
337
 
338
  def send_whatsapp_message(twilio_client, conversation_service_sid_val, conversation_sid, message_body, bot_identity_val):
339
- """Sends a message to a Twilio Conversation from the bot's identity."""
340
  if not twilio_client:
341
  st.error("Twilio client not initialized for sending message.")
342
  return False
@@ -549,13 +557,9 @@ if st.session_state.get("app_started") and st.session_state.get("rag_pipeline_re
549
 
550
  context_for_llm, raw_context_data = "No specific context could be retrieved.", None
551
 
552
- # Parameters for personalized LLM response
553
- extracted_customer_name = None
554
- extracted_item_name = None
555
- extracted_shipping_address = None
556
- extracted_delivery_date = None
557
- extracted_order_id = None
558
- extracted_order_status = None
559
 
560
  if intent == "ORDER_STATUS":
561
  order_id_to_check = None
@@ -568,9 +572,12 @@ if st.session_state.get("app_started") and st.session_state.get("rag_pipeline_re
568
 
569
  if order_id_to_check:
570
  raw_context_data = get_order_details(order_id_to_check, st.session_state.customer_orders_data)
571
- context_for_llm = f"Order Details for {order_id_to_check}: {raw_context_data}"
572
- # Try to parse details for personalized response
573
- if not raw_context_data.startswith("No order found") and not raw_context_data.startswith("Customer order data is not loaded"):
574
  try:
575
  order_data_dict = json.loads(raw_context_data)
576
  extracted_customer_name = order_data_dict.get("customer_name")
@@ -578,21 +585,23 @@ if st.session_state.get("app_started") and st.session_state.get("rag_pipeline_re
578
  if items and len(items) > 0 and isinstance(items[0], dict):
579
  extracted_item_name = items[0].get("name", "your item(s)")
580
  else:
581
- extracted_item_name = "your item(s)"
582
  extracted_shipping_address = order_data_dict.get("shipping_address")
583
  extracted_delivery_date = order_data_dict.get("delivered_on")
584
  extracted_order_status = order_data_dict.get("status")
585
- extracted_order_id = order_data_dict.get("order_id")
586
  except json.JSONDecodeError:
587
  st.warning(f"Could not parse order details JSON for {order_id_to_check} for personalization.")
588
- # raw_context_data already holds the error or JSON string
589
  else:
590
  context_for_llm = "To check an order status, please provide a valid Order ID (e.g., ORD123)."
591
  raw_context_data = {"message": "Order ID needed or not found in query."}
592
 
593
  elif intent == "PRODUCT_INFO":
594
  raw_context_data = get_product_info(user_query_manual, st.session_state.products_data)
595
- context_for_llm = f"Product Information related to '{user_query_manual}': {raw_context_data}"
596
 
597
  elif intent == "GENERAL_POLICY_FAQ" or intent == "UNKNOWN":
598
  if st.session_state.faiss_index_pdfs and st.session_state.embedding_model and st.session_state.indexed_pdf_chunks:
@@ -612,13 +621,13 @@ if st.session_state.get("app_started") and st.session_state.get("rag_pipeline_re
612
  llm_response = generate_response_groq(
613
  _groq_client=st.session_state.groq_client,
614
  query=user_query_manual,
615
- context=context_for_llm, # Pass the full context string for LLM's general reference
616
  intent=intent,
617
  customer_name=extracted_customer_name,
618
  item_name=extracted_item_name,
619
  shipping_address=extracted_shipping_address,
620
  delivery_date=extracted_delivery_date,
621
- order_id=extracted_order_id,
622
  order_status=extracted_order_status
623
  )
624
 
@@ -675,9 +684,10 @@ if st.session_state.get("bot_started") and st.session_state.get("rag_pipeline_re
675
 
676
  context_for_llm_whatsapp = "No specific context could be retrieved."
677
  raw_context_data_whatsapp = None
678
 
679
- # Parameters for personalized LLM response
680
- wa_customer_name, wa_item_name, wa_shipping_address, wa_delivery_date, wa_order_id, wa_order_status = [None] * 6
681
 
682
  if intent_whatsapp == "ORDER_STATUS":
683
  order_id_to_check_whatsapp = None
@@ -690,8 +700,9 @@ if st.session_state.get("bot_started") and st.session_state.get("rag_pipeline_re
690
 
691
  if order_id_to_check_whatsapp:
692
  raw_context_data_whatsapp = get_order_details(order_id_to_check_whatsapp, st.session_state.customer_orders_data)
693
- context_for_llm_whatsapp = f"Order Details for {order_id_to_check_whatsapp}: {raw_context_data_whatsapp}"
694
- if not raw_context_data_whatsapp.startswith("No order found") and not raw_context_data_whatsapp.startswith("Customer order data is not loaded"):
 
695
  try:
696
  order_data_dict_wa = json.loads(raw_context_data_whatsapp)
697
  wa_customer_name = order_data_dict_wa.get("customer_name")
@@ -706,6 +717,9 @@ if st.session_state.get("bot_started") and st.session_state.get("rag_pipeline_re
706
  wa_order_id = order_data_dict_wa.get("order_id")
707
  except json.JSONDecodeError:
708
  st.warning(f"Could not parse order details JSON for {order_id_to_check_whatsapp} (WhatsApp) for personalization.")
709
  else:
710
  context_for_llm_whatsapp = "To check an order status, please provide a valid Order ID (e.g., ORD123)."
711
  raw_context_data_whatsapp = {"message": "Order ID needed or not found in query."}
@@ -713,7 +727,7 @@ if st.session_state.get("bot_started") and st.session_state.get("rag_pipeline_re
713
 
714
  elif intent_whatsapp == "PRODUCT_INFO":
715
  raw_context_data_whatsapp = get_product_info(user_query_whatsapp, st.session_state.products_data)
716
- context_for_llm_whatsapp = f"Product Information related to '{user_query_whatsapp}': {raw_context_data_whatsapp}"
717
 
718
  elif intent_whatsapp == "GENERAL_POLICY_FAQ" or intent_whatsapp == "UNKNOWN":
719
  if st.session_state.faiss_index_pdfs and st.session_state.embedding_model and st.session_state.indexed_pdf_chunks:
 
15
  st.set_page_config(page_title="RAG Customer Support Chatbot", layout="wide")
16
 
17
  # --- Default Configurations & File Paths ---
18
+ DEFAULT_TWILIO_ACCOUNT_SID_FALLBACK = ""
19
+ DEFAULT_TWILIO_AUTH_TOKEN_FALLBACK = ""
20
+ DEFAULT_GROQ_API_KEY_FALLBACK = ""
21
 
22
  DEFAULT_TWILIO_CONVERSATION_SERVICE_SID = ""
23
+ DEFAULT_TWILIO_BOT_WHATSAPP_IDENTITY = st.secrets.get("TWILIO_PHONE_NUMBER", "whatsapp:+14155238886")
24
  DEFAULT_EMBEDDING_MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
25
  DEFAULT_POLLING_INTERVAL_S = 30
26
  DOCS_FOLDER = "docs/"
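The defaults above resolve through st.secrets; a minimal sketch of the same layered fallback (secret, then environment variable, then hard-coded default). The helper name and env-var lookup are illustrative, not part of the commit:

    import os
    import streamlit as st

    def resolve_setting(secret_key: str, env_key: str, default: str = "") -> str:
        # Prefer Streamlit secrets, then the process environment, then the fallback default.
        value = st.secrets.get(secret_key)
        if value:
            return value
        return os.environ.get(env_key, default)

    # Mirrors DEFAULT_TWILIO_BOT_WHATSAPP_IDENTITY above, with the Twilio sandbox number as last resort.
    bot_identity = resolve_setting("TWILIO_PHONE_NUMBER", "TWILIO_PHONE_NUMBER", "whatsapp:+14155238886")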
 
30
  FAQ_PDF_FILE = os.path.join(DOCS_FOLDER, "FAQ.pdf")
31
 
32
  # --- Application Secrets Configuration ---
 
33
  APP_TWILIO_ACCOUNT_SID = st.secrets.get("TWILIO_ACCOUNT_SID")
34
  APP_TWILIO_AUTH_TOKEN = st.secrets.get("TWILIO_AUTH_TOKEN")
35
  APP_GROQ_API_KEY = st.secrets.get("GROQ_API_KEY")
36
 
 
37
  APP_TWILIO_CONVERSATION_SERVICE_SID_SECRET = st.secrets.get("TWILIO_CONVERSATION_SERVICE_SID")
38
  APP_TWILIO_BOT_WHATSAPP_IDENTITY_SECRET = st.secrets.get("TWILIO_BOT_WHATSAPP_IDENTITY")
39
 
40
 
41
  # --- RAG Processing Utilities ---
42
  def load_json_data(file_path):
 
43
  try:
44
  with open(file_path, 'r', encoding='utf-8') as f:
45
  data = json.load(f)
 
55
  return None
56
 
57
  def load_pdf_data(file_path):
 
58
  try:
59
  with open(file_path, 'rb') as f:
60
  reader = PyPDF2.PdfReader(f)
 
71
  return []
72
 
73
  def chunk_text(text_pages, chunk_size=1000, chunk_overlap=200):
 
74
  full_text = "\n".join(text_pages)
75
  if not full_text.strip():
76
  return []
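The body of chunk_text is only partially visible in this hunk; a minimal sliding-window chunker consistent with the visible signature and defaults (chunk_size=1000, chunk_overlap=200) could look like the sketch below. It is illustrative, not the committed implementation.

    def chunk_text_sketch(text_pages, chunk_size=1000, chunk_overlap=200):
        # Join the per-page texts, then slide a fixed-size window with overlap.
        full_text = "\n".join(text_pages)
        if not full_text.strip():
            return []
        step = max(1, chunk_size - chunk_overlap)
        chunks = []
        for start in range(0, len(full_text), step):
            piece = full_text[start:start + chunk_size]
            if piece.strip():
                chunks.append(piece)
        return chunks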
 
88
 
89
  @st.cache_resource(show_spinner="Initializing embedding model...")
90
  def initialize_embedding_model(model_name=DEFAULT_EMBEDDING_MODEL_NAME):
 
91
  try:
92
  model = SentenceTransformer(model_name)
93
  return model
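Because the loader is wrapped in @st.cache_resource, the SentenceTransformer is created once per process and reused across Streamlit reruns; encoding then follows the standard sentence-transformers call (usage sketch with an illustrative query):

    from sentence_transformers import SentenceTransformer

    model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
    query_vec = model.encode(["What is your return policy?"], convert_to_numpy=True)
    print(query_vec.shape)  # (1, 384) for all-MiniLM-L6-v2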
 
97
 
98
  @st.cache_resource(show_spinner="Building FAISS index for PDF documents...")
99
  def create_faiss_index(_text_chunks, _embedding_model):
 
100
  if not _text_chunks or _embedding_model is None:
101
  st.warning("Cannot create FAISS index: No text chunks or embedding model available.")
102
  return None, []
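For reference, the index/search flow implemented by create_faiss_index and search_faiss_index reduces to the end-to-end sketch below (toy chunks and query; the committed functions add caching, error handling, and Streamlit warnings):

    import faiss
    from sentence_transformers import SentenceTransformer

    model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
    chunks = [
        "Items can be returned within 30 days of delivery.",
        "Standard shipping takes 3 to 5 business days.",
    ]
    embeddings = model.encode(chunks, convert_to_numpy=True).astype("float32")

    index = faiss.IndexFlatL2(embeddings.shape[1])  # exact L2 search over chunk embeddings
    index.add(embeddings)

    query = model.encode(["How long do I have to return an item?"], convert_to_numpy=True).astype("float32")
    distances, ids = index.search(query, 3)  # k=3, matching the default in search_faiss_index
    top_chunks = [chunks[i] for i in ids[0] if i != -1]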
 
120
  return None, []
121
 
122
  def search_faiss_index(index, query_text, embedding_model, indexed_chunks, k=3):
 
123
  if index is None or embedding_model is None or not query_text:
124
  return []
125
  try:
 
138
  return []
139
 
140
  def get_order_details(order_id, customer_orders_data):
 
141
  if not customer_orders_data:
142
  return "Customer order data is not loaded."
143
  for order in customer_orders_data:
 
146
  return f"No order found with ID: {order_id}."
147
 
148
  def get_product_info(query, products_data):
 
149
  if not products_data:
150
  st.warning("Product data is not loaded or is empty in get_product_info.")
151
  return "Product data is not loaded."
 
185
  def generate_response_groq(_groq_client, query, context, model="llama3-8b-8192",
186
  intent=None, customer_name=None, item_name=None,
187
  shipping_address=None, delivery_date=None, order_id=None, order_status=None):
188
  if not _groq_client:
189
  return "GROQ client not initialized. Please check API key."
190
  if not query:
 
194
  user_prompt = ""
195
 
196
  if intent == "ORDER_STATUS" and order_id and customer_name and order_status:
197
+ system_message = (
198
+ f"You are an exceptionally friendly and helpful customer support assistant. "
199
+ f"Your current task is to provide a single, complete, and human-like sentence as a response to {customer_name} "
200
+ f"about their order {order_id}. You MUST incorporate all relevant order details provided into this single sentence."
201
+ )
202
 
203
+ item_description = item_name if item_name else "the ordered item(s)"
204
 
205
+ # Construct the core information string that the LLM needs to build upon
206
+ core_info_parts = [
207
+ f"your order {order_id}",
208
+ f"for {item_description}",
209
+ f"has a status of '{order_status}'"
210
+ ]
211
 
212
  if order_status.lower() == "delivered":
213
+ if shipping_address:
214
+ core_info_parts.append(f"and was delivered to {shipping_address}")
215
+ else:
216
+ core_info_parts.append("and was delivered (address not specified)")
217
+ if delivery_date:
218
+ core_info_parts.append(f"on {delivery_date}")
219
  else:
220
+ core_info_parts.append("(delivery date not specified)")
221
 
222
+ core_information_to_include = ", ".join(core_info_parts[:-1]) + (f" {core_info_parts[-1]}" if len(core_info_parts) > 1 else "")
223
+ if not order_status.lower() == "delivered" and len(core_info_parts) > 1 : # for non-delivered, avoid 'and' before status
224
+ core_information_to_include = f"your order {order_id} for {item_description} has a status of '{order_status}'"
225
+
226
+
227
+ user_prompt = (
228
+ f"Customer: {customer_name}\n"
229
+ f"Order ID: {order_id}\n"
230
+ f"Item(s): {item_description}\n"
231
+ f"Status: {order_status}\n"
232
+ )
233
+ if order_status.lower() == "delivered":
234
+ user_prompt += f"Shipping Address: {shipping_address if shipping_address else 'Not specified'}\n"
235
+ user_prompt += f"Delivered On: {delivery_date if delivery_date else 'Not specified'}\n"
236
+
237
+ user_prompt += f"\nOriginal user query for context: '{query}'\n\n"
238
+ user_prompt += (
239
+ f"Your task: Generate a single, complete, and human-like sentence that starts with a greeting to {customer_name}. "
240
+ f"This sentence MUST convey the following essential information: {core_information_to_include}.\n"
241
+ f"For example, if all details are present for a delivered order: 'Hi {customer_name}, {core_information_to_include}.'\n"
242
+ f"For example, for a non-delivered order: 'Hi {customer_name}, {core_information_to_include}.'\n"
243
+ f"IMPORTANT: Do not ask questions. Do not add any extra conversational fluff. Just provide the single, informative sentence as requested. "
244
+ f"Ensure the sentence flows naturally and uses the details you've been given.\n"
245
+ f"Respond now with ONLY that single sentence."
246
+ )
247
+ # For LLM's deeper reference, though the primary instruction is above:
248
+ # user_prompt += f"\n\nFull database context for your reference if needed: {context}"
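To make the core_information_to_include assembly above concrete, with hypothetical sample values the delivered-order branch joins to a single clause like this:

    # Hypothetical sample values, tracing the joining logic above.
    core_info_parts = [
        "your order ORD123",
        "for a Blue Hoodie",
        "has a status of 'Delivered'",
        "and was delivered to 12 Main St",
        "on 2024-05-01",
    ]
    core_information_to_include = ", ".join(core_info_parts[:-1]) + (f" {core_info_parts[-1]}" if len(core_info_parts) > 1 else "")
    # -> "your order ORD123, for a Blue Hoodie, has a status of 'Delivered', and was delivered to 12 Main St on 2024-05-01"

For non-delivered orders the code then overwrites this with the simpler single f-string shown a few lines above.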
249
 
250
  else: # Default prompt structure for other intents or if details are missing
251
  system_message = "You are a helpful customer support assistant."
 
266
  {"role": "system", "content": system_message},
267
  {"role": "user", "content": user_prompt}
268
  ],
269
+ model=model,
270
+ temperature=0.5, # Slightly lower temperature might help with stricter adherence
271
+ max_tokens=1024,
272
+ top_p=1
273
  )
274
+ response = chat_completion.choices[0].message.content.strip() # Added strip()
275
  return response
276
  except Exception as e:
277
  st.error(f"Error calling GROQ API: {e}")
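Outside of Streamlit, the Groq call made here boils down to the following minimal sketch (the API key source and the query are placeholders; in the app the key comes from st.secrets):

    import os
    from groq import Groq

    client = Groq(api_key=os.environ["GROQ_API_KEY"])  # placeholder key source
    chat_completion = client.chat.completions.create(
        model="llama3-8b-8192",
        messages=[
            {"role": "system", "content": "You are a helpful customer support assistant."},
            {"role": "user", "content": "What is the status of my order ORD123?"},
        ],
        temperature=0.5,
        max_tokens=1024,
        top_p=1,
    )
    print(chat_completion.choices[0].message.content.strip())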
 
279
 
280
 
281
  def initialize_groq_client(api_key_val):
 
282
  if not api_key_val:
283
  st.warning("GROQ API Key is missing.")
284
  return None
 
291
 
292
  # --- Twilio Operations ---
293
  def initialize_twilio_client(acc_sid, auth_tkn):
 
294
  if not acc_sid or not auth_tkn:
295
  st.warning("Twilio Account SID or Auth Token is missing.")
296
  return None
 
303
 
304
  def get_new_whatsapp_messages(twilio_client, conversation_service_sid_val, bot_start_time_utc,
305
  processed_message_sids, bot_whatsapp_identity_val):
 
306
  if not twilio_client:
307
  st.warning("Twilio client not initialized.")
308
  return []
 
345
  return sorted(new_messages_to_process, key=lambda m: m['timestamp_utc'])
346
 
347
  def send_whatsapp_message(twilio_client, conversation_service_sid_val, conversation_sid, message_body, bot_identity_val):
 
348
  if not twilio_client:
349
  st.error("Twilio client not initialized for sending message.")
350
  return False
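The two Twilio helpers above poll and reply through the Conversations API; stripped of the Streamlit plumbing, the underlying calls look roughly like this sketch (the account SID, auth token, service SID, and conversation SID are placeholders; the bot identity matches the sandbox default above):

    from twilio.rest import Client

    client = Client("ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", "your_auth_token")  # placeholder credentials
    service = client.conversations.v1.services("ISxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")  # placeholder service SID
    bot_identity = "whatsapp:+14155238886"

    # Poll recent messages, skipping anything the bot itself sent.
    for conversation in service.conversations.list(limit=20):
        for message in service.conversations(conversation.sid).messages.list(order="desc", limit=5):
            if message.author != bot_identity:
                print(conversation.sid, message.author, message.body, message.date_created)

    # Reply into a specific conversation as the bot identity (placeholder conversation SID).
    service.conversations("CHxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx").messages.create(
        author=bot_identity, body="Hi! How can I help you today?"
    )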
 
557
 
558
  context_for_llm, raw_context_data = "No specific context could be retrieved.", None
559
 
560
+ extracted_customer_name, extracted_item_name, extracted_shipping_address, \
561
+ extracted_delivery_date, extracted_order_id, extracted_order_status = [None] * 6
562
+
563
 
564
  if intent == "ORDER_STATUS":
565
  order_id_to_check = None
 
572
 
573
  if order_id_to_check:
574
  raw_context_data = get_order_details(order_id_to_check, st.session_state.customer_orders_data)
575
+ # context_for_llm will be used as the 'context' parameter in generate_response_groq
576
+ # For ORDER_STATUS, this raw_context_data (JSON string) is still useful for LLM's reference,
577
+ # even though specific fields are extracted for the specialized prompt.
578
+ context_for_llm = raw_context_data
579
+
580
+ if isinstance(raw_context_data, str) and not raw_context_data.startswith("No order found") and not raw_context_data.startswith("Customer order data is not loaded"):
581
  try:
582
  order_data_dict = json.loads(raw_context_data)
583
  extracted_customer_name = order_data_dict.get("customer_name")
 
585
  if items and len(items) > 0 and isinstance(items[0], dict):
586
  extracted_item_name = items[0].get("name", "your item(s)")
587
  else:
588
+ extracted_item_name = "your item(s)" # Fallback
589
  extracted_shipping_address = order_data_dict.get("shipping_address")
590
  extracted_delivery_date = order_data_dict.get("delivered_on")
591
  extracted_order_status = order_data_dict.get("status")
592
+ extracted_order_id = order_data_dict.get("order_id") # Should be same as order_id_to_check
593
  except json.JSONDecodeError:
594
  st.warning(f"Could not parse order details JSON for {order_id_to_check} for personalization.")
595
+ context_for_llm = f"Error parsing order details for {order_id_to_check}. Raw data: {raw_context_data}"
596
+ elif isinstance(raw_context_data, str): # Handle "No order found" or "data not loaded"
597
+ context_for_llm = raw_context_data # LLM will state this
598
  else:
599
  context_for_llm = "To check an order status, please provide a valid Order ID (e.g., ORD123)."
600
  raw_context_data = {"message": "Order ID needed or not found in query."}
601
 
602
  elif intent == "PRODUCT_INFO":
603
  raw_context_data = get_product_info(user_query_manual, st.session_state.products_data)
604
+ context_for_llm = raw_context_data # Product info is directly used as context
605
 
606
  elif intent == "GENERAL_POLICY_FAQ" or intent == "UNKNOWN":
607
  if st.session_state.faiss_index_pdfs and st.session_state.embedding_model and st.session_state.indexed_pdf_chunks:
 
621
  llm_response = generate_response_groq(
622
  _groq_client=st.session_state.groq_client,
623
  query=user_query_manual,
624
+ context=context_for_llm,
625
  intent=intent,
626
  customer_name=extracted_customer_name,
627
  item_name=extracted_item_name,
628
  shipping_address=extracted_shipping_address,
629
  delivery_date=extracted_delivery_date,
630
+ order_id=extracted_order_id, # This will be the specific order ID from user query
631
  order_status=extracted_order_status
632
  )
633
 
 
684
 
685
  context_for_llm_whatsapp = "No specific context could be retrieved."
686
  raw_context_data_whatsapp = None
687
+
688
+ wa_customer_name, wa_item_name, wa_shipping_address, \
689
+ wa_delivery_date, wa_order_id, wa_order_status = [None] * 6
690
 
 
 
691
 
692
  if intent_whatsapp == "ORDER_STATUS":
693
  order_id_to_check_whatsapp = None
 
700
 
701
  if order_id_to_check_whatsapp:
702
  raw_context_data_whatsapp = get_order_details(order_id_to_check_whatsapp, st.session_state.customer_orders_data)
703
+ context_for_llm_whatsapp = raw_context_data_whatsapp # Full JSON string as context
704
+
705
+ if isinstance(raw_context_data_whatsapp, str) and not raw_context_data_whatsapp.startswith("No order found") and not raw_context_data_whatsapp.startswith("Customer order data is not loaded"):
706
  try:
707
  order_data_dict_wa = json.loads(raw_context_data_whatsapp)
708
  wa_customer_name = order_data_dict_wa.get("customer_name")
 
717
  wa_order_id = order_data_dict_wa.get("order_id")
718
  except json.JSONDecodeError:
719
  st.warning(f"Could not parse order details JSON for {order_id_to_check_whatsapp} (WhatsApp) for personalization.")
720
+ context_for_llm_whatsapp = f"Error parsing order details for {order_id_to_check_whatsapp}. Raw data: {raw_context_data_whatsapp}"
721
+ elif isinstance(raw_context_data_whatsapp, str):
722
+ context_for_llm_whatsapp = raw_context_data_whatsapp
723
  else:
724
  context_for_llm_whatsapp = "To check an order status, please provide a valid Order ID (e.g., ORD123)."
725
  raw_context_data_whatsapp = {"message": "Order ID needed or not found in query."}
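Both the manual-query path and this WhatsApp path perform the same per-field extraction from the order JSON; summarized as a standalone helper, it amounts to the sketch below (assuming the keys referenced above: customer_name, items[].name, shipping_address, delivered_on, order_id, status):

    import json

    def extract_order_fields(raw_order_json):
        # Returns (customer_name, item_name, shipping_address, delivery_date, order_id, status),
        # falling back to None (or a generic item label) when a field is missing or unparseable.
        try:
            order = json.loads(raw_order_json)
        except (json.JSONDecodeError, TypeError):
            return (None,) * 6
        items = order.get("items") or []
        item_name = items[0].get("name", "your item(s)") if items and isinstance(items[0], dict) else "your item(s)"
        return (
            order.get("customer_name"),
            item_name,
            order.get("shipping_address"),
            order.get("delivered_on"),
            order.get("order_id"),
            order.get("status"),
        )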
 
727
 
728
  elif intent_whatsapp == "PRODUCT_INFO":
729
  raw_context_data_whatsapp = get_product_info(user_query_whatsapp, st.session_state.products_data)
730
+ context_for_llm_whatsapp = raw_context_data_whatsapp
731
 
732
  elif intent_whatsapp == "GENERAL_POLICY_FAQ" or intent_whatsapp == "UNKNOWN":
733
  if st.session_state.faiss_index_pdfs and st.session_state.embedding_model and st.session_state.indexed_pdf_chunks: