masadonline committed
Commit c9d8fa5 · verified · 1 Parent(s): b585e5b

Update app.py

Files changed (1)
  1. app.py +74 -138
app.py CHANGED
@@ -23,16 +23,18 @@ import re
 APP_START_TIME = datetime.datetime.now(datetime.timezone.utc)
 os.environ["PYTORCH_JIT"] = "0"
 
+# Twilio Setup
+TWILIO_ACCOUNT_SID = os.getenv("TWILIO_ACCOUNT_SID")
+TWILIO_AUTH_TOKEN = os.getenv("TWILIO_AUTH_TOKEN")
+twilio_client = Client(TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN)
+
 # ---------------- PDF & DOCX & JSON Extraction ----------------
 def _extract_tables_from_page(page):
     tables = page.extract_tables()
     formatted_tables = []
     for table in tables:
-        formatted_table = []
-        for row in table:
-            formatted_row = [cell if cell is not None else "" for cell in row]
-            formatted_table.append(formatted_row)
-        formatted_tables.append(formatted_table)
+        formatted_row = [[cell if cell is not None else "" for cell in row] for row in table]
+        formatted_tables.append(formatted_row)
     return formatted_tables
 
 def extract_text_from_pdf(pdf_path):
@@ -46,48 +48,40 @@ def extract_text_from_pdf(pdf_path):
                 if text:
                     text_output.write(text + "\n\n")
     except Exception as e:
-        print(f"pdfplumber error: {e}")
         with open(pdf_path, 'rb') as file:
             extract_text_to_fp(file, text_output, laparams=LAParams(), output_type='text')
     return text_output.getvalue(), all_tables
 
 def _format_tables_internal(tables):
-    formatted_tables_str = []
+    formatted = []
     for table in tables:
         with StringIO() as csvfile:
             writer = csv.writer(csvfile)
             writer.writerows(table)
-            formatted_tables_str.append(csvfile.getvalue())
-    return "\n\n".join(formatted_tables_str)
+            formatted.append(csvfile.getvalue())
+    return "\n\n".join(formatted)
 
 def clean_extracted_text(text):
     return '\n'.join(' '.join(line.strip().split()) for line in text.splitlines() if line.strip())
 
-def extract_text_from_docx(docx_path):
+def extract_text_from_docx(path):
     try:
-        doc = docx.Document(docx_path)
-        return '\n'.join(para.text for para in doc.paragraphs)
+        doc = docx.Document(path)
+        return '\n'.join(p.text for p in doc.paragraphs)
     except:
         return ""
 
-def load_json_data(json_path):
+def load_json_data(path):
     try:
-        with open(json_path, 'r', encoding='utf-8') as f:
+        with open(path, 'r', encoding='utf-8') as f:
             data = json.load(f)
         if isinstance(data, dict):
-            # Flatten dictionary values (avoiding nested structures as strings)
-            return "\n".join(f"{key}: {value}" for key, value in data.items() if not isinstance(value, (dict, list)))
+            return "\n".join(f"{k}: {v}" for k, v in data.items() if not isinstance(v, (dict, list)))
         elif isinstance(data, list):
-            # Flatten list of dictionaries
-            all_items = []
-            for item in data:
-                if isinstance(item, dict):
-                    all_items.append("\n".join(f"{key}: {value}" for key, value in item.items() if not isinstance(value, (dict, list))))
-            return "\n\n".join(all_items)
+            return "\n\n".join("\n".join(f"{k}: {v}" for k, v in item.items() if not isinstance(v, (dict, list))) for item in data if isinstance(item, dict))
         else:
             return json.dumps(data, ensure_ascii=False, indent=2)
     except Exception as e:
-        print(f"JSON read error: {e}")
         return ""
 
 # ---------------- Chunking ----------------
@@ -99,7 +93,6 @@ def chunk_text(text, tokenizer, chunk_size=128, chunk_overlap=32):
         end = min(start + chunk_size, len(tokens))
         chunk = tokens[start:end]
         chunks.append(tokenizer.convert_tokens_to_string(chunk))
-        if end == len(tokens): break
         start += chunk_size - chunk_overlap
     return chunks
 
@@ -112,36 +105,18 @@ def retrieve_chunks(question, index, embed_model, text_chunks, k=3):
 def generate_answer_with_groq(question, context):
     url = "https://api.groq.com/openai/v1/chat/completions"
     api_key = os.environ.get("GROQ_API_KEY")
-    headers = {
-        "Authorization": f"Bearer {api_key}",
-        "Content-Type": "application/json",
-    }
-    prompt = (
-        f"Customer asked: '{question}'\n\n"
-        f"Here is the relevant information to help:\n{context}\n\n"
-        f"Respond in a friendly and helpful tone as a toy shop support agent, "
-        f"addressing the customer by their name if it's available in the context."
-    )
+    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+    prompt = f"Customer asked: '{question}'\n\nHere is the relevant information to help:\n{context}"
     payload = {
         "model": "llama3-8b-8192",
         "messages": [
-            {
-                "role": "system",
-                "content": (
-                    "You are ToyBot, a friendly WhatsApp assistant for an online toy shop. "
-                    "Help customers with toys, delivery, and returns in a helpful tone. "
-                    "When responding, try to find the customer's name in the provided context "
-                    "and address them directly. If the context contains order details and status, "
-                    "include that information in your response."
-                )
-            },
+            {"role": "system", "content": "You are ToyBot, a friendly WhatsApp assistant..."},
             {"role": "user", "content": prompt},
         ],
         "temperature": 0.5,
         "max_tokens": 300,
     }
     response = requests.post(url, headers=headers, json=payload)
-    response.raise_for_status()
     return response.json()['choices'][0]['message']['content'].strip()
 
 # ---------------- Twilio Integration ----------------
@@ -150,101 +125,62 @@ def fetch_latest_incoming_message(client, conversation_sid):
         messages = client.conversations.v1.conversations(conversation_sid).messages.list()
         for msg in reversed(messages):
             if msg.author.startswith("whatsapp:"):
-                return {
-                    "sid": msg.sid,
-                    "body": msg.body,
-                    "author": msg.author,
-                    "timestamp": msg.date_created,
-                }
-    except TwilioRestException as e:
-        print(f"Twilio error: {e}")
-        return None
+                return {"sid": msg.sid, "body": msg.body, "author": msg.author, "timestamp": msg.date_created}
+    except TwilioRestException:
+        return None
 
 def send_twilio_message(client, conversation_sid, body):
-    return client.conversations.v1.conversations(conversation_sid).messages.create(
-        author="system", body=body
-    )
+    return client.conversations.v1.conversations(conversation_sid).messages.create(author="system", body=body)
 
 # ---------------- Knowledge Base Setup ----------------
 def setup_knowledge_base():
-    folder_path = "docs"
-    all_text = ""
-
-    for filename in os.listdir(folder_path):
-        file_path = os.path.join(folder_path, filename)
-        if filename.endswith(".pdf"):
-            text, tables = extract_text_from_pdf(file_path)
-            all_text += clean_extracted_text(text) + "\n"
-            all_text += _format_tables_internal(tables) + "\n"
-        elif filename.endswith(".docx"):
-            text = extract_text_from_docx(file_path)
-            all_text += clean_extracted_text(text) + "\n"
-        elif filename.endswith(".json"):
-            text = load_json_data(file_path)
-            all_text += text + "\n"
-        elif filename.endswith(".csv"):
-            try:
-                with open(file_path, newline='', encoding='utf-8') as csvfile:
-                    reader = csv.DictReader(csvfile)
-                    for row in reader:
-                        line = ' | '.join(f"{k}: {v}" for k, v in row.items())
-                        all_text += line + "\n"
-            except Exception as e:
-                print(f"CSV read error: {e}")
-
-    tokenizer = AutoTokenizer.from_pretrained('bert-base-uncased')
-    chunks = chunk_text(all_text, tokenizer)
-    model = SentenceTransformer('all-mpnet-base-v2')
-    embeddings = model.encode(chunks, show_progress_bar=False)
-    dim = embeddings[0].shape[0]
-    index = faiss.IndexFlatL2(dim)
-    index.add(np.array(embeddings).astype('float32'))
-    return index, model, chunks
-
-# ---------------- Monitor Twilio Conversations ----------------
-def start_conversation_monitor(client, index, embed_model, text_chunks):
-    processed_convos = set()
-    last_processed_timestamp = {}
-
-    def poll_convo(convo_sid):
-        while True:
-            latest_msg = fetch_latest_incoming_message(client, convo_sid)
-            if latest_msg:
-                msg_time = latest_msg["timestamp"]
-                if msg_time > APP_START_TIME:
-                    if convo_sid not in last_processed_timestamp or msg_time > last_processed_timestamp[convo_sid]:
-                        last_processed_timestamp[convo_sid] = msg_time
-                        question = latest_msg["body"]
-                        sender = latest_msg["author"]
-                        print(f"📩 New message from {sender}: {question}")
-                        context = "\n\n".join(retrieve_chunks(question, index, embed_model, text_chunks))
-                        answer = generate_answer_with_groq(question, context)
-                        send_twilio_message(client, convo_sid, answer)
-            time.sleep(5)
-
-    for convo in client.conversations.v1.conversations.list():
-        # convo.date_created is a datetime object in UTC, compare it with APP_START_TIME
-        if convo.date_created is not None and convo.date_created > APP_START_TIME:
-            if convo.sid not in processed_convos:
-                processed_convos.add(convo.sid)
-                threading.Thread(target=poll_convo, args=(convo.sid,), daemon=True).start()
-
-
-# ---------------- Main Entry ----------------
-if __name__ == "__main__":
-    st.title("🤖 ToyBot WhatsApp Assistant")
-    st.write("Initializing knowledge base...")
-
-    index, model, chunks = setup_knowledge_base()
-
-    st.success("Knowledge base loaded.")
-    st.write("Waiting for WhatsApp messages...")
-
-    account_sid = os.environ.get("TWILIO_ACCOUNT_SID")
-    auth_token = os.environ.get("TWILIO_AUTH_TOKEN")
-    if not account_sid or not auth_token:
-        st.error("❌ Twilio credentials not set.")
-    else:
-        client = Client(account_sid, auth_token)
-        start_conversation_monitor(client, index, model, chunks)
-        st.info("✅ Bot is now monitoring Twilio conversations.")
+    folder = "docs"
+    text = ""
+    for f in os.listdir(folder):
+        path = os.path.join(folder, f)
+        if f.endswith(".pdf"):
+            t, tables = extract_text_from_pdf(path)
+            text += clean_extracted_text(t) + "\n" + _format_tables_internal(tables) + "\n"
+        elif f.endswith(".docx"):
+            text += clean_extracted_text(extract_text_from_docx(path)) + "\n"
+        elif f.endswith(".json"):
+            text += load_json_data(path) + "\n"
+        elif f.endswith(".csv"):
+            with open(path, newline='', encoding='utf-8') as csvfile:
+                reader = csv.DictReader(csvfile)
+                for row in reader:
+                    text += ' | '.join(f"{k}: {v}" for k, v in row.items()) + "\n"
+    return text
+
+# ---------------- Message Processing Loop ----------------
+def process_messages_loop(conversation_sid, index, text_chunks, tokenizer, embed_model):
+    processed_sids = set()
+    while True:
+        message = fetch_latest_incoming_message(twilio_client, conversation_sid)
+        if message and message['sid'] not in processed_sids and message['timestamp'] > APP_START_TIME:
+            question = message['body']
+            relevant = retrieve_chunks(question, index, embed_model, text_chunks)
+            answer = generate_answer_with_groq(question, '\n'.join(relevant))
+            send_twilio_message(twilio_client, conversation_sid, answer)
+            processed_sids.add(message['sid'])
+        time.sleep(5)
+
+# ---------------- Streamlit UI ----------------
+st.title("📱 ToyShop WhatsApp Chatbot")
+kb_text = setup_knowledge_base()
+tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
+embed_model = SentenceTransformer("all-MiniLM-L6-v2")
+chunks = chunk_text(kb_text, tokenizer)
+embeddings = embed_model.encode(chunks)
+index = faiss.IndexFlatL2(len(embeddings[0]))
+index.add(np.array(embeddings))
+
+# Automatically fetch conversation SID
+conversations = twilio_client.conversations.v1.conversations.list(limit=5)
+conversation_sid = conversations[0].sid if conversations else None
+
+if conversation_sid:
+    st.success(f"Monitoring Twilio conversation SID: {conversation_sid}")
+    threading.Thread(target=process_messages_loop, args=(conversation_sid, index, chunks, tokenizer, embed_model), daemon=True).start()
+else:
+    st.error("No active Twilio conversation found.")
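Note: the diff calls retrieve_chunks(question, index, embed_model, text_chunks), but its body lies outside the changed hunks; only the signature in a hunk header (def retrieve_chunks(question, index, embed_model, text_chunks, k=3):) is visible. For reference, a minimal sketch of what such a top-k lookup over the faiss.IndexFlatL2 built above typically looks like; this is an assumption, not the committed implementation:

import numpy as np

# Hypothetical sketch only: embed the question with the same SentenceTransformer
# used for the chunks, search the L2 index, and return the k closest text chunks.
def retrieve_chunks(question, index, embed_model, text_chunks, k=3):
    query_vec = embed_model.encode([question])                   # shape (1, dim)
    _, neighbor_ids = index.search(np.array(query_vec, dtype="float32"), k)
    return [text_chunks[i] for i in neighbor_ids[0] if i != -1]  # -1 means no match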
 
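Both versions read credentials from the environment: GROQ_API_KEY in generate_answer_with_groq, and TWILIO_ACCOUNT_SID / TWILIO_AUTH_TOKEN for the Twilio client, which the new code constructs at module import time. Below is a hypothetical pre-flight check in the spirit of the guard the old __main__ block performed; it is not part of this commit:

import os
import streamlit as st

# Hypothetical snippet, not in the commit: stop the Streamlit app early with a
# clear message if any credential the module-level code expects is missing.
REQUIRED_VARS = ("TWILIO_ACCOUNT_SID", "TWILIO_AUTH_TOKEN", "GROQ_API_KEY")
missing = [name for name in REQUIRED_VARS if not os.environ.get(name)]
if missing:
    st.error(f"Missing environment variables: {', '.join(missing)}")
    st.stop()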