broadfield-dev committed on
Commit ae3c8d7 · verified · 1 Parent(s): e889bb7

Update memory_logic.py

Files changed (1)
  1. memory_logic.py +63 -128
memory_logic.py CHANGED
@@ -69,22 +69,17 @@ def _init_sqlite_tables():
69
  try:
70
  with _get_sqlite_connection() as conn:
71
  cursor = conn.cursor()
72
- # Stores JSON string of the memory object
73
  cursor.execute("""
74
  CREATE TABLE IF NOT EXISTS memories (
75
  id INTEGER PRIMARY KEY AUTOINCREMENT,
76
  memory_json TEXT NOT NULL,
77
- # Optionally add embedding here if not using separate FAISS index
78
- # embedding BLOB,
79
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
80
  )
81
  """)
82
- # Stores the rule text directly
83
  cursor.execute("""
84
  CREATE TABLE IF NOT EXISTS rules (
85
  id INTEGER PRIMARY KEY AUTOINCREMENT,
86
  rule_text TEXT NOT NULL UNIQUE,
87
- # embedding BLOB,
88
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
89
  )
90
  """)
@@ -93,7 +88,7 @@ def _init_sqlite_tables():
93
  except Exception as e:
94
  logger.error(f"SQLite table initialization error: {e}", exc_info=True)
95
 
96
- # --- Initialization ---
97
  def initialize_memory_system():
98
  global _initialized, _embedder, _dimension, _faiss_memory_index, _memory_items_list, _faiss_rules_index, _rules_items_list
99
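The hunks above finish the SQLite schema that _init_sqlite_tables() creates (the removed lines were Python-style comments that had leaked into the SQL strings). For a quick sanity check of that schema, a small standalone snippet can open the same SQLite file and count rows; the database path below is hypothetical, since the module resolves its own file internally, and only the table names memories and rules are taken from the diff.

    import sqlite3

    DB_PATH = "app_data/memory.db"  # hypothetical path; the module resolves its own SQLite file

    def inspect_memory_schema(db_path: str = DB_PATH) -> None:
        # Count rows in the tables created by _init_sqlite_tables().
        with sqlite3.connect(db_path) as conn:
            cur = conn.cursor()
            cur.execute(
                "SELECT name FROM sqlite_master WHERE type='table' AND name IN ('memories', 'rules')"
            )
            for (name,) in cur.fetchall():
                count = cur.execute(f"SELECT COUNT(*) FROM {name}").fetchone()[0]
                print(f"{name}: {count} rows")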
 
@@ -105,10 +100,9 @@ def initialize_memory_system():
105
  logger.info(f"Initializing memory system with backend: {STORAGE_BACKEND}")
106
  init_start_time = time.time()
107
 
108
- # 1. Load Sentence Transformer Model (always needed for semantic operations)
109
  if not SentenceTransformer or not faiss or not np:
110
  logger.error("Core RAG libraries (SentenceTransformers, FAISS, NumPy) not available. Cannot initialize semantic memory.")
111
- _initialized = False # Mark as not properly initialized
112
  return
113
 
114
  if not _embedder:
@@ -120,14 +114,13 @@ def initialize_memory_system():
120
  except Exception as e:
121
  logger.critical(f"FATAL: Error loading SentenceTransformer: {e}", exc_info=True)
122
  _initialized = False
123
- return # Cannot proceed without embedder
124
 
125
- # 2. Initialize SQLite if used
126
  if STORAGE_BACKEND == "SQLITE":
127
  _init_sqlite_tables()
128
 
129
- # 3. Load Memories
130
- logger.info("Loading memories...")
131
  temp_memories_json = []
132
  if STORAGE_BACKEND == "RAM":
133
  pass
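The _dimension used further down for both IndexFlatL2 indices has to match the embedder's output size, which is why initialization bails out when the SentenceTransformer cannot be loaded. A minimal sketch of that relationship, assuming a sentence-transformers checkpoint such as all-MiniLM-L6-v2 (the actual model behind _embedder is configured elsewhere in the module):

    import numpy as np
    import faiss
    from sentence_transformers import SentenceTransformer

    embedder = SentenceTransformer("all-MiniLM-L6-v2")       # assumed checkpoint
    dimension = embedder.get_sentence_embedding_dimension()  # 384 for this model

    index = faiss.IndexFlatL2(dimension)
    vectors = np.array(embedder.encode(["hello world"], convert_to_tensor=False), dtype=np.float32)
    index.add(vectors)    # vectors must be float32 with shape (n, dimension)
    print(index.ntotal)   # -> 1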
@@ -141,27 +134,27 @@ def initialize_memory_system():
141
  logger.info(f"Attempting to load memories from HF Dataset: {HF_MEMORY_DATASET_REPO}")
142
  dataset = load_dataset(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
143
  if "train" in dataset and "memory_json" in dataset["train"].column_names:
144
- temp_memories_json = [m_json for m_json in dataset["train"]["memory_json"] if isinstance(m_json, str)]
145
- else: logger.warning(f"HF Dataset {HF_MEMORY_DATASET_REPO} for memories not found or 'memory_json' column missing.")
146
- except Exception as e: logger.error(f"Error loading memories from HF Dataset ({HF_MEMORY_DATASET_REPO}): {e}")
 
 
 
 
147
 
148
  _memory_items_list = temp_memories_json
149
- logger.info(f"Loaded {len(_memory_items_list)} memory items from {STORAGE_BACKEND}.")
150
 
151
- # 4. Build/Load FAISS Memory Index
152
  _faiss_memory_index = faiss.IndexFlatL2(_dimension)
153
  if _memory_items_list:
154
  logger.info(f"Building FAISS index for {len(_memory_items_list)} memories...")
155
- # Extract text to embed from memory JSON objects
156
  texts_to_embed_mem = []
157
  for mem_json_str in _memory_items_list:
158
  try:
159
  mem_obj = json.loads(mem_json_str)
160
- # Consistent embedding strategy: user input + bot response + takeaway
161
  text = f"User: {mem_obj.get('user_input','')}\nAI: {mem_obj.get('bot_response','')}\nTakeaway: {mem_obj.get('metrics',{}).get('takeaway','N/A')}"
162
  texts_to_embed_mem.append(text)
163
- except json.JSONDecodeError:
164
- logger.warning(f"Skipping malformed memory JSON for FAISS indexing: {mem_json_str[:100]}")
165
 
166
  if texts_to_embed_mem:
167
  try:
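The embedding text built in this hunk is the same User/AI/Takeaway template that add_memory_entry uses later, so vectors indexed at startup stay comparable with vectors added at runtime. A small self-contained illustration of that template (the example memory values are invented):

    import json

    memory_json = json.dumps({
        "user_input": "What's the refund window?",
        "metrics": {"takeaway": "Refunds are allowed within 30 days."},
        "bot_response": "You have 30 days from purchase to request a refund.",
    })

    mem = json.loads(memory_json)
    text_to_embed = (
        f"User: {mem.get('user_input', '')}\n"
        f"AI: {mem.get('bot_response', '')}\n"
        f"Takeaway: {mem.get('metrics', {}).get('takeaway', 'N/A')}"
    )
    print(text_to_embed)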
@@ -173,9 +166,8 @@ def initialize_memory_system():
173
  except Exception as e_faiss_mem: logger.error(f"Error building FAISS memory index: {e_faiss_mem}")
174
  logger.info(f"FAISS memory index built. Total items: {_faiss_memory_index.ntotal if _faiss_memory_index else 'N/A'}")
175
 
176
-
177
- # 5. Load Rules
178
- logger.info("Loading rules...")
179
  temp_rules_text = []
180
  if STORAGE_BACKEND == "RAM":
181
  pass
@@ -189,18 +181,21 @@ def initialize_memory_system():
189
  logger.info(f"Attempting to load rules from HF Dataset: {HF_RULES_DATASET_REPO}")
190
  dataset = load_dataset(HF_RULES_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
191
  if "train" in dataset and "rule_text" in dataset["train"].column_names:
 
 
192
  temp_rules_text = [r_text for r_text in dataset["train"]["rule_text"] if isinstance(r_text, str) and r_text.strip()]
193
- else: logger.warning(f"HF Dataset {HF_RULES_DATASET_REPO} for rules not found or 'rule_text' column missing.")
194
- except Exception as e: logger.error(f"Error loading rules from HF Dataset ({HF_RULES_DATASET_REPO}): {e}")
 
 
195
 
196
- _rules_items_list = sorted(list(set(temp_rules_text))) # Ensure unique and sorted
197
- logger.info(f"Loaded {len(_rules_items_list)} rule items from {STORAGE_BACKEND}.")
198
 
199
- # 6. Build/Load FAISS Rules Index
200
  _faiss_rules_index = faiss.IndexFlatL2(_dimension)
201
  if _rules_items_list:
202
  logger.info(f"Building FAISS index for {len(_rules_items_list)} rules...")
203
- if _rules_items_list: # Check again in case it became empty after filtering
204
  try:
205
  embeddings = _embedder.encode(_rules_items_list, convert_to_tensor=False, show_progress_bar=False)
206
  embeddings_np = np.array(embeddings, dtype=np.float32)
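The loading code above checks for the train split and the expected column before reading it. That defensive pattern can be packaged as a small helper; this is a sketch only, assuming the repo id and token come from configuration (the repo id below is hypothetical).

    from datasets import load_dataset

    def load_text_column(repo_id: str, column: str, token: str = None) -> list[str]:
        # Returns only non-empty strings from the given column, or [] if the
        # split or column is missing, mirroring the loading code above.
        ds = load_dataset(repo_id, token=token, trust_remote_code=True)
        if "train" not in ds or column not in ds["train"].column_names:
            return []
        return [t for t in ds["train"][column] if isinstance(t, str) and t.strip()]

    rules = load_text_column("your-username/agent-rules", "rule_text")  # hypothetical repo id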
@@ -213,116 +208,90 @@ def initialize_memory_system():
213
  _initialized = True
214
  logger.info(f"Memory system initialization complete in {time.time() - init_start_time:.2f}s")
215
 
 
 
 
 
 
 
 
 
 
 
 
 
216
 
217
  # --- Memory Operations (Semantic) ---
218
  def add_memory_entry(user_input: str, metrics: dict, bot_response: str) -> tuple[bool, str]:
219
- """Adds a memory entry to the configured backend and FAISS index."""
220
  global _memory_items_list, _faiss_memory_index
221
- if not _initialized: initialize_memory_system()
222
  if not _embedder or not _faiss_memory_index:
223
  return False, "Memory system or embedder not initialized for adding memory."
224
-
225
- memory_obj = {
226
- "user_input": user_input,
227
- "metrics": metrics,
228
- "bot_response": bot_response,
229
- "timestamp": datetime.utcnow().isoformat()
230
- }
231
  memory_json_str = json.dumps(memory_obj)
232
-
233
  text_to_embed = f"User: {user_input}\nAI: {bot_response}\nTakeaway: {metrics.get('takeaway', 'N/A')}"
234
-
235
  try:
236
  embedding = _embedder.encode([text_to_embed], convert_to_tensor=False)
237
  embedding_np = np.array(embedding, dtype=np.float32).reshape(1, -1)
238
-
239
  if embedding_np.shape != (1, _dimension):
240
  logger.error(f"Memory embedding shape error: {embedding_np.shape}. Expected (1, {_dimension})")
241
  return False, "Embedding shape error."
242
-
243
- # Add to FAISS
244
  _faiss_memory_index.add(embedding_np)
245
-
246
- # Add to in-memory list
247
  _memory_items_list.append(memory_json_str)
248
-
249
- # Add to persistent storage
250
  if STORAGE_BACKEND == "SQLITE" and sqlite3:
251
  with _get_sqlite_connection() as conn:
252
  conn.execute("INSERT INTO memories (memory_json) VALUES (?)", (memory_json_str,))
253
  conn.commit()
254
  elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
255
- # This can be slow, consider batching or async push
256
  logger.info(f"Pushing {len(_memory_items_list)} memories to HF Hub: {HF_MEMORY_DATASET_REPO}")
257
- Dataset.from_dict({"memory_json": list(_memory_items_list)}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True) # Ensure 'private' as needed
258
-
259
  logger.info(f"Added memory. RAM: {len(_memory_items_list)}, FAISS: {_faiss_memory_index.ntotal}")
260
  return True, "Memory added successfully."
261
  except Exception as e:
262
  logger.error(f"Error adding memory entry: {e}", exc_info=True)
263
- # TODO: Potential rollback logic if FAISS add succeeded but backend failed (complex)
264
  return False, f"Error adding memory: {e}"
265
 
266
  def retrieve_memories_semantic(query: str, k: int = 3) -> list[dict]:
267
- """Retrieves k most relevant memories using semantic search."""
268
- if not _initialized: initialize_memory_system()
269
  if not _embedder or not _faiss_memory_index or _faiss_memory_index.ntotal == 0:
270
- logger.debug("Cannot retrieve memories: Embedder, FAISS index not ready, or index is empty.")
271
  return []
272
-
273
  try:
274
  query_embedding = _embedder.encode([query], convert_to_tensor=False)
275
  query_embedding_np = np.array(query_embedding, dtype=np.float32).reshape(1, -1)
276
-
277
  if query_embedding_np.shape[1] != _dimension:
278
  logger.error(f"Query embedding dimension mismatch. Expected {_dimension}, got {query_embedding_np.shape[1]}")
279
  return []
280
-
281
  distances, indices = _faiss_memory_index.search(query_embedding_np, min(k, _faiss_memory_index.ntotal))
282
-
283
  results = []
284
  for i in indices[0]:
285
  if 0 <= i < len(_memory_items_list):
286
- try:
287
- results.append(json.loads(_memory_items_list[i]))
288
- except json.JSONDecodeError:
289
- logger.warning(f"Could not parse memory JSON from list at index {i}")
290
- else:
291
- logger.warning(f"FAISS index {i} out of bounds for memory_items_list (len: {len(_memory_items_list)})")
292
-
293
  logger.debug(f"Retrieved {len(results)} memories semantically for query: '{query[:50]}...'")
294
  return results
295
  except Exception as e:
296
  logger.error(f"Error retrieving memories semantically: {e}", exc_info=True)
297
  return []
298
 
299
-
300
  # --- Rule (Insight) Operations (Semantic) ---
301
  def add_rule_entry(rule_text: str) -> tuple[bool, str]:
302
- """Adds a rule if valid and not a duplicate. Updates backend and FAISS."""
303
  global _rules_items_list, _faiss_rules_index
304
- if not _initialized: initialize_memory_system()
305
- if not _embedder or not _faiss_rules_index:
306
- return False, "Rule system or embedder not initialized."
307
-
308
  rule_text = rule_text.strip()
309
  if not rule_text: return False, "Rule text cannot be empty."
310
  if not re.match(r"\[(CORE_RULE|RESPONSE_PRINCIPLE|BEHAVIORAL_ADJUSTMENT|GENERAL_LEARNING)\|([\d\.]+?)\](.*)", rule_text, re.I|re.DOTALL):
311
  return False, "Invalid rule format."
312
- if rule_text in _rules_items_list:
313
- return False, "duplicate"
314
-
315
  try:
316
  embedding = _embedder.encode([rule_text], convert_to_tensor=False)
317
  embedding_np = np.array(embedding, dtype=np.float32).reshape(1, -1)
318
-
319
- if embedding_np.shape != (1, _dimension):
320
- return False, "Rule embedding shape error."
321
-
322
  _faiss_rules_index.add(embedding_np)
323
  _rules_items_list.append(rule_text)
324
  _rules_items_list.sort()
325
-
326
  if STORAGE_BACKEND == "SQLITE" and sqlite3:
327
  with _get_sqlite_connection() as conn:
328
  conn.execute("INSERT OR IGNORE INTO rules (rule_text) VALUES (?)", (rule_text,))
@@ -330,29 +299,21 @@ def add_rule_entry(rule_text: str) -> tuple[bool, str]:
330
  elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
331
  logger.info(f"Pushing {len(_rules_items_list)} rules to HF Hub: {HF_RULES_DATASET_REPO}")
332
  Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
333
-
334
  logger.info(f"Added rule. RAM: {len(_rules_items_list)}, FAISS: {_faiss_rules_index.ntotal}")
335
  return True, "Rule added successfully."
336
  except Exception as e:
337
  logger.error(f"Error adding rule entry: {e}", exc_info=True)
338
- # Basic rollback if FAISS add succeeded
339
- if rule_text in _rules_items_list and _faiss_rules_index.ntotal > 0: # Crude check
340
- # A full rollback would involve rebuilding FAISS index from _rules_items_list before append.
341
- # For simplicity, this is omitted here. State could be inconsistent on error.
342
- pass
343
  return False, f"Error adding rule: {e}"
344
 
345
  def retrieve_rules_semantic(query: str, k: int = 5) -> list[str]:
346
- """Retrieves k most relevant rules using semantic search."""
347
- if not _initialized: initialize_memory_system()
348
  if not _embedder or not _faiss_rules_index or _faiss_rules_index.ntotal == 0:
 
349
  return []
350
  try:
351
  query_embedding = _embedder.encode([query], convert_to_tensor=False)
352
  query_embedding_np = np.array(query_embedding, dtype=np.float32).reshape(1, -1)
353
-
354
  if query_embedding_np.shape[1] != _dimension: return []
355
-
356
  distances, indices = _faiss_rules_index.search(query_embedding_np, min(k, _faiss_rules_index.ntotal))
357
  results = [_rules_items_list[i] for i in indices[0] if 0 <= i < len(_rules_items_list)]
358
  logger.debug(f"Retrieved {len(results)} rules semantically for query: '{query[:50]}...'")
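The rule format enforced by add_rule_entry is exactly the regular expression shown above. A short check against an example rule string (the rule text itself is illustrative):

    import re

    RULE_PATTERN = re.compile(
        r"\[(CORE_RULE|RESPONSE_PRINCIPLE|BEHAVIORAL_ADJUSTMENT|GENERAL_LEARNING)\|([\d\.]+?)\](.*)",
        re.I | re.DOTALL,
    )

    candidate = "[CORE_RULE|1.0] Always cite the memory that grounds an answer."
    match = RULE_PATTERN.match(candidate)
    if match:
        rule_type, priority, body = match.groups()
        print(rule_type, priority, body.strip())  # CORE_RULE 1.0 Always cite ...
    else:
        print("Invalid rule format.")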
@@ -362,78 +323,62 @@ def retrieve_rules_semantic(query: str, k: int = 5) -> list[str]:
362
  return []
363
 
364
  def remove_rule_entry(rule_text_to_delete: str) -> bool:
365
- """Removes a rule from backend and rebuilds FAISS for rules."""
366
  global _rules_items_list, _faiss_rules_index
367
- if not _initialized: initialize_memory_system()
368
  if not _embedder or not _faiss_rules_index: return False
369
-
370
  rule_text_to_delete = rule_text_to_delete.strip()
371
- if rule_text_to_delete not in _rules_items_list:
372
- return False # Not found
373
-
374
  try:
375
  _rules_items_list.remove(rule_text_to_delete)
376
- _rules_items_list.sort() # Maintain sorted order
377
-
378
- # Rebuild FAISS index for rules (simplest way to ensure consistency after removal)
379
  new_faiss_rules_index = faiss.IndexFlatL2(_dimension)
380
  if _rules_items_list:
381
  embeddings = _embedder.encode(_rules_items_list, convert_to_tensor=False)
382
  embeddings_np = np.array(embeddings, dtype=np.float32)
383
  if embeddings_np.ndim == 2 and embeddings_np.shape[0] == len(_rules_items_list) and embeddings_np.shape[1] == _dimension:
384
  new_faiss_rules_index.add(embeddings_np)
385
- else: # Should not happen if list is consistent
386
  logger.error("Error rebuilding FAISS for rules after removal: Embedding shape error. State might be inconsistent.")
387
- # Attempt to revert _rules_items_list (add back the rule)
388
  _rules_items_list.append(rule_text_to_delete)
389
  _rules_items_list.sort()
390
- return False # Indicate failure
391
  _faiss_rules_index = new_faiss_rules_index
392
-
393
- # Remove from persistent storage
394
  if STORAGE_BACKEND == "SQLITE" and sqlite3:
395
  with _get_sqlite_connection() as conn:
396
  conn.execute("DELETE FROM rules WHERE rule_text = ?", (rule_text_to_delete,))
397
  conn.commit()
398
  elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
399
  Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
400
-
401
  logger.info(f"Removed rule. RAM: {len(_rules_items_list)}, FAISS: {_faiss_rules_index.ntotal}")
402
  return True
403
  except Exception as e:
404
  logger.error(f"Error removing rule entry: {e}", exc_info=True)
405
- # Potential partial failure, state might be inconsistent.
406
  return False
407
 
408
  # --- Utility functions to get all data (for UI display, etc.) ---
409
  def get_all_rules_cached() -> list[str]:
410
- if not _initialized: initialize_memory_system()
411
  return list(_rules_items_list)
412
 
413
  def get_all_memories_cached() -> list[dict]:
414
- if not _initialized: initialize_memory_system()
415
- # Convert JSON strings to dicts for easier use by UI
416
  mem_dicts = []
417
  for mem_json_str in _memory_items_list:
418
  try: mem_dicts.append(json.loads(mem_json_str))
419
- except: pass # Ignore parse errors for display
420
  return mem_dicts
421
 
422
  def clear_all_memory_data_backend() -> bool:
423
- """Clears all memories from backend and resets in-memory FAISS/list."""
424
  global _memory_items_list, _faiss_memory_index
425
- if not _initialized: initialize_memory_system()
426
-
427
  success = True
428
  try:
429
  if STORAGE_BACKEND == "SQLITE" and sqlite3:
430
  with _get_sqlite_connection() as conn: conn.execute("DELETE FROM memories"); conn.commit()
431
  elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
432
- # Deleting from HF usually means pushing an empty dataset
433
  Dataset.from_dict({"memory_json": []}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)
434
-
435
  _memory_items_list = []
436
- if _faiss_memory_index: _faiss_memory_index.reset() # Clear FAISS index
437
  logger.info("All memories cleared from backend and in-memory stores.")
438
  except Exception as e:
439
  logger.error(f"Error clearing all memory data: {e}")
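remove_rule_entry rebuilds the whole rules index rather than deleting a single vector, because removing an entry from a flat index shifts the positions of everything after it and would break the alignment between list positions and index positions. A sketch of that rebuild step in isolation (the embedder and rule list are passed in; nothing here is specific to this module):

    import faiss
    import numpy as np

    def rebuild_rules_index(embedder, rule_texts: list[str], dimension: int) -> faiss.IndexFlatL2:
        # Re-encode the remaining rules so FAISS positions match list positions again.
        index = faiss.IndexFlatL2(dimension)
        if rule_texts:
            vectors = np.array(embedder.encode(rule_texts, convert_to_tensor=False), dtype=np.float32)
            if vectors.ndim == 2 and vectors.shape == (len(rule_texts), dimension):
                index.add(vectors)
        return index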
@@ -441,17 +386,14 @@ def clear_all_memory_data_backend() -> bool:
441
  return success
442
 
443
  def clear_all_rules_data_backend() -> bool:
444
- """Clears all rules from backend and resets in-memory FAISS/list."""
445
  global _rules_items_list, _faiss_rules_index
446
- if not _initialized: initialize_memory_system()
447
-
448
  success = True
449
  try:
450
  if STORAGE_BACKEND == "SQLITE" and sqlite3:
451
  with _get_sqlite_connection() as conn: conn.execute("DELETE FROM rules"); conn.commit()
452
  elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
453
  Dataset.from_dict({"rule_text": []}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
454
-
455
  _rules_items_list = []
456
  if _faiss_rules_index: _faiss_rules_index.reset()
457
  logger.info("All rules cleared from backend and in-memory stores.")
@@ -460,22 +402,18 @@ def clear_all_rules_data_backend() -> bool:
460
  success = False
461
  return success
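Both backends persist to the Hub by snapshotting the full in-memory list and pushing it as a dataset, and "clearing" is the same call with an empty column. A minimal sketch of that pattern, with a hypothetical repo id and assuming a valid token is available via huggingface-cli login or the token argument:

    from datasets import Dataset

    repo_id = "your-username/agent-rules"  # hypothetical; HF_RULES_DATASET_REPO in the module
    rules = ["[CORE_RULE|1.0] Keep answers grounded in retrieved memories."]

    # Snapshot-and-push: the whole list is written each time.
    Dataset.from_dict({"rule_text": rules}).push_to_hub(repo_id, private=True)

    # Clearing the backend is the same call with an empty column.
    Dataset.from_dict({"rule_text": []}).push_to_hub(repo_id, private=True)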
462
 
463
- # Optional: Function to save FAISS indices to disk (from ai-learn, if needed for persistence between app runs with RAM backend)
464
  FAISS_MEMORY_PATH = os.path.join(os.getenv("FAISS_STORAGE_PATH", "app_data/faiss_indices"), "memory_index.faiss")
465
  FAISS_RULES_PATH = os.path.join(os.getenv("FAISS_STORAGE_PATH", "app_data/faiss_indices"), "rules_index.faiss")
466
 
467
  def save_faiss_indices_to_disk():
468
  if not _initialized or not faiss: return
469
-
470
  faiss_dir = os.path.dirname(FAISS_MEMORY_PATH)
471
  if not os.path.exists(faiss_dir): os.makedirs(faiss_dir, exist_ok=True)
472
-
473
  if _faiss_memory_index and _faiss_memory_index.ntotal > 0:
474
  try:
475
  faiss.write_index(_faiss_memory_index, FAISS_MEMORY_PATH)
476
  logger.info(f"Memory FAISS index saved to disk ({_faiss_memory_index.ntotal} items).")
477
  except Exception as e: logger.error(f"Error saving memory FAISS index: {e}")
478
-
479
  if _faiss_rules_index and _faiss_rules_index.ntotal > 0:
480
  try:
481
  faiss.write_index(_faiss_rules_index, FAISS_RULES_PATH)
@@ -485,17 +423,14 @@ def save_faiss_indices_to_disk():
485
  def load_faiss_indices_from_disk():
486
  global _faiss_memory_index, _faiss_rules_index
487
  if not _initialized or not faiss: return
488
-
489
- if os.path.exists(FAISS_MEMORY_PATH) and _faiss_memory_index: # Check if index object exists
490
  try:
491
  logger.info(f"Loading memory FAISS index from {FAISS_MEMORY_PATH}...")
492
  _faiss_memory_index = faiss.read_index(FAISS_MEMORY_PATH)
493
  logger.info(f"Memory FAISS index loaded ({_faiss_memory_index.ntotal} items).")
494
- # Consistency check: FAISS ntotal vs len(_memory_items_list)
495
  if _faiss_memory_index.ntotal != len(_memory_items_list) and len(_memory_items_list) > 0:
496
  logger.warning(f"Memory FAISS index count ({_faiss_memory_index.ntotal}) differs from loaded texts ({len(_memory_items_list)}). Consider rebuilding FAISS.")
497
  except Exception as e: logger.error(f"Error loading memory FAISS index: {e}. Will use fresh index.")
498
-
499
  if os.path.exists(FAISS_RULES_PATH) and _faiss_rules_index:
500
  try:
501
  logger.info(f"Loading rules FAISS index from {FAISS_RULES_PATH}...")
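The save/load helpers above rely on FAISS's own serialization. A self-contained round-trip sketch with a toy 8-dimensional index; the path mirrors the module's default FAISS_STORAGE_PATH layout:

    import os
    import faiss
    import numpy as np

    path = os.path.join("app_data/faiss_indices", "memory_index.faiss")
    os.makedirs(os.path.dirname(path), exist_ok=True)

    index = faiss.IndexFlatL2(8)
    index.add(np.random.rand(3, 8).astype(np.float32))

    faiss.write_index(index, path)     # persist between runs (useful with the RAM backend)
    restored = faiss.read_index(path)  # reload on startup
    assert restored.ntotal == index.ntotal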
 
69
  try:
70
  with _get_sqlite_connection() as conn:
71
  cursor = conn.cursor()
 
72
  cursor.execute("""
73
  CREATE TABLE IF NOT EXISTS memories (
74
  id INTEGER PRIMARY KEY AUTOINCREMENT,
75
  memory_json TEXT NOT NULL,
 
 
76
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
77
  )
78
  """)
 
79
  cursor.execute("""
80
  CREATE TABLE IF NOT EXISTS rules (
81
  id INTEGER PRIMARY KEY AUTOINCREMENT,
82
  rule_text TEXT NOT NULL UNIQUE,
 
83
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
84
  )
85
  """)
 
88
  except Exception as e:
89
  logger.error(f"SQLite table initialization error: {e}", exc_info=True)
90
 
91
+ # --- Initialization and State Management ---
92
  def initialize_memory_system():
93
  global _initialized, _embedder, _dimension, _faiss_memory_index, _memory_items_list, _faiss_rules_index, _rules_items_list
94
 
 
100
  logger.info(f"Initializing memory system with backend: {STORAGE_BACKEND}")
101
  init_start_time = time.time()
102
 
 
103
  if not SentenceTransformer or not faiss or not np:
104
  logger.error("Core RAG libraries (SentenceTransformers, FAISS, NumPy) not available. Cannot initialize semantic memory.")
105
+ _initialized = False
106
  return
107
 
108
  if not _embedder:
 
114
  except Exception as e:
115
  logger.critical(f"FATAL: Error loading SentenceTransformer: {e}", exc_info=True)
116
  _initialized = False
117
+ return
118
 
 
119
  if STORAGE_BACKEND == "SQLITE":
120
  _init_sqlite_tables()
121
 
122
+ # Load Memories
123
+ logger.info("Loading memories from persistent storage...")
124
  temp_memories_json = []
125
  if STORAGE_BACKEND == "RAM":
126
  pass
 
134
  logger.info(f"Attempting to load memories from HF Dataset: {HF_MEMORY_DATASET_REPO}")
135
  dataset = load_dataset(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
136
  if "train" in dataset and "memory_json" in dataset["train"].column_names:
137
+ num_rows = len(dataset["train"])
138
+ logger.info(f"HF Dataset for memories found. 'train' split has {num_rows} rows.")
139
+ temp_memories_json = [m_json for m_json in dataset["train"]["memory_json"] if isinstance(m_json, str) and m_json.strip()]
140
+ logger.info(f"Extracted {len(temp_memories_json)} valid memory JSON strings from the dataset.")
141
+ else:
142
+ logger.warning(f"HF Dataset {HF_MEMORY_DATASET_REPO} loaded, but 'train' split or 'memory_json' column is missing. Dataset structure: {dataset}")
143
+ except Exception as e: logger.error(f"Error loading memories from HF Dataset ({HF_MEMORY_DATASET_REPO}): {e}", exc_info=True)
144
 
145
  _memory_items_list = temp_memories_json
146
+ logger.info(f"Loaded {len(_memory_items_list)} memory items into cache from {STORAGE_BACKEND}.")
147
 
 
148
  _faiss_memory_index = faiss.IndexFlatL2(_dimension)
149
  if _memory_items_list:
150
  logger.info(f"Building FAISS index for {len(_memory_items_list)} memories...")
 
151
  texts_to_embed_mem = []
152
  for mem_json_str in _memory_items_list:
153
  try:
154
  mem_obj = json.loads(mem_json_str)
 
155
  text = f"User: {mem_obj.get('user_input','')}\nAI: {mem_obj.get('bot_response','')}\nTakeaway: {mem_obj.get('metrics',{}).get('takeaway','N/A')}"
156
  texts_to_embed_mem.append(text)
157
+ except json.JSONDecodeError: logger.warning(f"Skipping malformed memory JSON for FAISS indexing: {mem_json_str[:100]}")
 
158
 
159
  if texts_to_embed_mem:
160
  try:
 
166
  except Exception as e_faiss_mem: logger.error(f"Error building FAISS memory index: {e_faiss_mem}")
167
  logger.info(f"FAISS memory index built. Total items: {_faiss_memory_index.ntotal if _faiss_memory_index else 'N/A'}")
168
 
169
+ # Load Rules
170
+ logger.info("Loading rules from persistent storage...")
 
171
  temp_rules_text = []
172
  if STORAGE_BACKEND == "RAM":
173
  pass
 
181
  logger.info(f"Attempting to load rules from HF Dataset: {HF_RULES_DATASET_REPO}")
182
  dataset = load_dataset(HF_RULES_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
183
  if "train" in dataset and "rule_text" in dataset["train"].column_names:
184
+ num_rows = len(dataset["train"])
185
+ logger.info(f"HF Dataset for rules found. 'train' split has {num_rows} rows.")
186
  temp_rules_text = [r_text for r_text in dataset["train"]["rule_text"] if isinstance(r_text, str) and r_text.strip()]
187
+ logger.info(f"Extracted {len(temp_rules_text)} valid rule strings from the dataset.")
188
+ else:
189
+ logger.warning(f"HF Dataset {HF_RULES_DATASET_REPO} for rules loaded, but 'train' split or 'rule_text' column is missing. Dataset structure: {dataset}")
190
+ except Exception as e: logger.error(f"Error loading rules from HF Dataset ({HF_RULES_DATASET_REPO}): {e}", exc_info=True)
191
 
192
+ _rules_items_list = sorted(list(set(temp_rules_text)))
193
+ logger.info(f"Loaded {len(_rules_items_list)} rule items into cache from {STORAGE_BACKEND}.")
194
 
 
195
  _faiss_rules_index = faiss.IndexFlatL2(_dimension)
196
  if _rules_items_list:
197
  logger.info(f"Building FAISS index for {len(_rules_items_list)} rules...")
198
+ if _rules_items_list:
199
  try:
200
  embeddings = _embedder.encode(_rules_items_list, convert_to_tensor=False, show_progress_bar=False)
201
  embeddings_np = np.array(embeddings, dtype=np.float32)
 
208
  _initialized = True
209
  logger.info(f"Memory system initialization complete in {time.time() - init_start_time:.2f}s")
210
 
211
+ def _ensure_initialized(item_list, storage_type):
212
+ """Internal helper to check for initialization and trigger a reload if cache is empty on a persistent backend."""
213
+ global _initialized
214
+ if not _initialized or (storage_type != "RAM" and not item_list):
215
+ if not _initialized:
216
+ logger.warning("Memory system not initialized. Forcing initialization.")
217
+ else:
218
+ logger.warning(f"Persistent backend ({storage_type}) is configured, but cache is empty. Forcing re-initialization to reload data.")
219
+
220
+ with _init_lock:
221
+ _initialized = False
222
+ initialize_memory_system()
223
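_ensure_initialized re-enters initialize_memory_system() under _init_lock, a module-level lock that is not shown in this diff. A generic double-checked variant of the same guard, assuming the lock is a plain threading.Lock and with a placeholder standing in for the real initialization:

    import threading

    _init_lock = threading.Lock()  # assumed to mirror the module-level lock
    _initialized = False

    def _ensure_ready() -> None:
        # Cheap flag check first, then serialize the expensive setup so two
        # threads cannot initialize concurrently.
        global _initialized
        if _initialized:
            return
        with _init_lock:
            if not _initialized:
                expensive_setup()   # stands in for initialize_memory_system()
                _initialized = True

    def expensive_setup() -> None:
        pass  # placeholder: model loading, index building, etc.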
 
224
  # --- Memory Operations (Semantic) ---
225
  def add_memory_entry(user_input: str, metrics: dict, bot_response: str) -> tuple[bool, str]:
 
226
  global _memory_items_list, _faiss_memory_index
227
+ _ensure_initialized(_memory_items_list, STORAGE_BACKEND)
228
  if not _embedder or not _faiss_memory_index:
229
  return False, "Memory system or embedder not initialized for adding memory."
230
+ memory_obj = {"user_input": user_input, "metrics": metrics, "bot_response": bot_response, "timestamp": datetime.utcnow().isoformat()}
231
  memory_json_str = json.dumps(memory_obj)
 
232
  text_to_embed = f"User: {user_input}\nAI: {bot_response}\nTakeaway: {metrics.get('takeaway', 'N/A')}"
 
233
  try:
234
  embedding = _embedder.encode([text_to_embed], convert_to_tensor=False)
235
  embedding_np = np.array(embedding, dtype=np.float32).reshape(1, -1)
 
236
  if embedding_np.shape != (1, _dimension):
237
  logger.error(f"Memory embedding shape error: {embedding_np.shape}. Expected (1, {_dimension})")
238
  return False, "Embedding shape error."
 
 
239
  _faiss_memory_index.add(embedding_np)
 
 
240
  _memory_items_list.append(memory_json_str)
 
 
241
  if STORAGE_BACKEND == "SQLITE" and sqlite3:
242
  with _get_sqlite_connection() as conn:
243
  conn.execute("INSERT INTO memories (memory_json) VALUES (?)", (memory_json_str,))
244
  conn.commit()
245
  elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
 
246
  logger.info(f"Pushing {len(_memory_items_list)} memories to HF Hub: {HF_MEMORY_DATASET_REPO}")
247
+ Dataset.from_dict({"memory_json": list(_memory_items_list)}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)
 
248
  logger.info(f"Added memory. RAM: {len(_memory_items_list)}, FAISS: {_faiss_memory_index.ntotal}")
249
  return True, "Memory added successfully."
250
  except Exception as e:
251
  logger.error(f"Error adding memory entry: {e}", exc_info=True)
 
252
  return False, f"Error adding memory: {e}"
253
 
254
  def retrieve_memories_semantic(query: str, k: int = 3) -> list[dict]:
255
+ _ensure_initialized(_memory_items_list, STORAGE_BACKEND)
 
256
  if not _embedder or not _faiss_memory_index or _faiss_memory_index.ntotal == 0:
257
+ logger.warning("Cannot retrieve memories: Embedder/FAISS index not ready or empty after initialization attempt.")
258
  return []
 
259
  try:
260
  query_embedding = _embedder.encode([query], convert_to_tensor=False)
261
  query_embedding_np = np.array(query_embedding, dtype=np.float32).reshape(1, -1)
 
262
  if query_embedding_np.shape[1] != _dimension:
263
  logger.error(f"Query embedding dimension mismatch. Expected {_dimension}, got {query_embedding_np.shape[1]}")
264
  return []
 
265
  distances, indices = _faiss_memory_index.search(query_embedding_np, min(k, _faiss_memory_index.ntotal))
 
266
  results = []
267
  for i in indices[0]:
268
  if 0 <= i < len(_memory_items_list):
269
+ try: results.append(json.loads(_memory_items_list[i]))
270
+ except json.JSONDecodeError: logger.warning(f"Could not parse memory JSON from list at index {i}")
271
+ else: logger.warning(f"FAISS index {i} out of bounds for memory_items_list (len: {len(_memory_items_list)})")
 
 
 
 
272
  logger.debug(f"Retrieved {len(results)} memories semantically for query: '{query[:50]}...'")
273
  return results
274
  except Exception as e:
275
  logger.error(f"Error retrieving memories semantically: {e}", exc_info=True)
276
  return []
277
 
 
278
  # --- Rule (Insight) Operations (Semantic) ---
279
  def add_rule_entry(rule_text: str) -> tuple[bool, str]:
 
280
  global _rules_items_list, _faiss_rules_index
281
+ _ensure_initialized(_rules_items_list, STORAGE_BACKEND)
282
+ if not _embedder or not _faiss_rules_index: return False, "Rule system or embedder not initialized."
 
 
283
  rule_text = rule_text.strip()
284
  if not rule_text: return False, "Rule text cannot be empty."
285
  if not re.match(r"\[(CORE_RULE|RESPONSE_PRINCIPLE|BEHAVIORAL_ADJUSTMENT|GENERAL_LEARNING)\|([\d\.]+?)\](.*)", rule_text, re.I|re.DOTALL):
286
  return False, "Invalid rule format."
287
+ if rule_text in _rules_items_list: return False, "duplicate"
 
 
288
  try:
289
  embedding = _embedder.encode([rule_text], convert_to_tensor=False)
290
  embedding_np = np.array(embedding, dtype=np.float32).reshape(1, -1)
291
+ if embedding_np.shape != (1, _dimension): return False, "Rule embedding shape error."
 
 
 
292
  _faiss_rules_index.add(embedding_np)
293
  _rules_items_list.append(rule_text)
294
  _rules_items_list.sort()
 
295
  if STORAGE_BACKEND == "SQLITE" and sqlite3:
296
  with _get_sqlite_connection() as conn:
297
  conn.execute("INSERT OR IGNORE INTO rules (rule_text) VALUES (?)", (rule_text,))
 
299
  elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
300
  logger.info(f"Pushing {len(_rules_items_list)} rules to HF Hub: {HF_RULES_DATASET_REPO}")
301
  Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
 
302
  logger.info(f"Added rule. RAM: {len(_rules_items_list)}, FAISS: {_faiss_rules_index.ntotal}")
303
  return True, "Rule added successfully."
304
  except Exception as e:
305
  logger.error(f"Error adding rule entry: {e}", exc_info=True)
306
  return False, f"Error adding rule: {e}"
307
 
308
  def retrieve_rules_semantic(query: str, k: int = 5) -> list[str]:
309
+ _ensure_initialized(_rules_items_list, STORAGE_BACKEND)
 
310
  if not _embedder or not _faiss_rules_index or _faiss_rules_index.ntotal == 0:
311
+ logger.warning("Cannot retrieve rules: Embedder/FAISS index not ready or empty after initialization attempt.")
312
  return []
313
  try:
314
  query_embedding = _embedder.encode([query], convert_to_tensor=False)
315
  query_embedding_np = np.array(query_embedding, dtype=np.float32).reshape(1, -1)
 
316
  if query_embedding_np.shape[1] != _dimension: return []
 
317
  distances, indices = _faiss_rules_index.search(query_embedding_np, min(k, _faiss_rules_index.ntotal))
318
  results = [_rules_items_list[i] for i in indices[0] if 0 <= i < len(_rules_items_list)]
319
  logger.debug(f"Retrieved {len(results)} rules semantically for query: '{query[:50]}...'")
 
323
  return []
324
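Taken together, the public surface after this commit can be exercised end to end. A usage sketch assuming the module is importable as memory_logic and a storage backend is configured; all example strings and metrics below are invented:

    import memory_logic as ml

    ml.initialize_memory_system()

    ok, msg = ml.add_memory_entry(
        user_input="How do I reset my API key?",
        metrics={"takeaway": "User needs the key-rotation flow."},
        bot_response="You can rotate the key from the account settings page.",
    )
    print(ok, msg)

    for mem in ml.retrieve_memories_semantic("rotating credentials", k=3):
        print(mem.get("user_input"), "->", mem.get("bot_response"))

    ok, msg = ml.add_rule_entry("[RESPONSE_PRINCIPLE|0.8] Prefer concise, step-by-step answers.")
    print(ok, msg)
    print(ml.retrieve_rules_semantic("answer style", k=5))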
 
325
  def remove_rule_entry(rule_text_to_delete: str) -> bool:
 
326
  global _rules_items_list, _faiss_rules_index
327
+ _ensure_initialized(_rules_items_list, STORAGE_BACKEND)
328
  if not _embedder or not _faiss_rules_index: return False
 
329
  rule_text_to_delete = rule_text_to_delete.strip()
330
+ if rule_text_to_delete not in _rules_items_list: return False
 
 
331
  try:
332
  _rules_items_list.remove(rule_text_to_delete)
333
+ _rules_items_list.sort()
 
 
334
  new_faiss_rules_index = faiss.IndexFlatL2(_dimension)
335
  if _rules_items_list:
336
  embeddings = _embedder.encode(_rules_items_list, convert_to_tensor=False)
337
  embeddings_np = np.array(embeddings, dtype=np.float32)
338
  if embeddings_np.ndim == 2 and embeddings_np.shape[0] == len(_rules_items_list) and embeddings_np.shape[1] == _dimension:
339
  new_faiss_rules_index.add(embeddings_np)
340
+ else:
341
  logger.error("Error rebuilding FAISS for rules after removal: Embedding shape error. State might be inconsistent.")
 
342
  _rules_items_list.append(rule_text_to_delete)
343
  _rules_items_list.sort()
344
+ return False
345
  _faiss_rules_index = new_faiss_rules_index
 
 
346
  if STORAGE_BACKEND == "SQLITE" and sqlite3:
347
  with _get_sqlite_connection() as conn:
348
  conn.execute("DELETE FROM rules WHERE rule_text = ?", (rule_text_to_delete,))
349
  conn.commit()
350
  elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
351
  Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
 
352
  logger.info(f"Removed rule. RAM: {len(_rules_items_list)}, FAISS: {_faiss_rules_index.ntotal}")
353
  return True
354
  except Exception as e:
355
  logger.error(f"Error removing rule entry: {e}", exc_info=True)
 
356
  return False
357
 
358
  # --- Utility functions to get all data (for UI display, etc.) ---
359
  def get_all_rules_cached() -> list[str]:
360
+ _ensure_initialized(_rules_items_list, STORAGE_BACKEND)
361
  return list(_rules_items_list)
362
 
363
  def get_all_memories_cached() -> list[dict]:
364
+ _ensure_initialized(_memory_items_list, STORAGE_BACKEND)
 
365
  mem_dicts = []
366
  for mem_json_str in _memory_items_list:
367
  try: mem_dicts.append(json.loads(mem_json_str))
368
+ except: pass
369
  return mem_dicts
370
 
371
  def clear_all_memory_data_backend() -> bool:
 
372
  global _memory_items_list, _faiss_memory_index
373
+ _ensure_initialized(_memory_items_list, STORAGE_BACKEND)
 
374
  success = True
375
  try:
376
  if STORAGE_BACKEND == "SQLITE" and sqlite3:
377
  with _get_sqlite_connection() as conn: conn.execute("DELETE FROM memories"); conn.commit()
378
  elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
 
379
  Dataset.from_dict({"memory_json": []}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)
 
380
  _memory_items_list = []
381
+ if _faiss_memory_index: _faiss_memory_index.reset()
382
  logger.info("All memories cleared from backend and in-memory stores.")
383
  except Exception as e:
384
  logger.error(f"Error clearing all memory data: {e}")
 
386
  return success
387
 
388
  def clear_all_rules_data_backend() -> bool:
 
389
  global _rules_items_list, _faiss_rules_index
390
+ _ensure_initialized(_rules_items_list, STORAGE_BACKEND)
 
391
  success = True
392
  try:
393
  if STORAGE_BACKEND == "SQLITE" and sqlite3:
394
  with _get_sqlite_connection() as conn: conn.execute("DELETE FROM rules"); conn.commit()
395
  elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
396
  Dataset.from_dict({"rule_text": []}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
 
397
  _rules_items_list = []
398
  if _faiss_rules_index: _faiss_rules_index.reset()
399
  logger.info("All rules cleared from backend and in-memory stores.")
 
402
  success = False
403
  return success
404
 
 
405
  FAISS_MEMORY_PATH = os.path.join(os.getenv("FAISS_STORAGE_PATH", "app_data/faiss_indices"), "memory_index.faiss")
406
  FAISS_RULES_PATH = os.path.join(os.getenv("FAISS_STORAGE_PATH", "app_data/faiss_indices"), "rules_index.faiss")
407
 
408
  def save_faiss_indices_to_disk():
409
  if not _initialized or not faiss: return
 
410
  faiss_dir = os.path.dirname(FAISS_MEMORY_PATH)
411
  if not os.path.exists(faiss_dir): os.makedirs(faiss_dir, exist_ok=True)
 
412
  if _faiss_memory_index and _faiss_memory_index.ntotal > 0:
413
  try:
414
  faiss.write_index(_faiss_memory_index, FAISS_MEMORY_PATH)
415
  logger.info(f"Memory FAISS index saved to disk ({_faiss_memory_index.ntotal} items).")
416
  except Exception as e: logger.error(f"Error saving memory FAISS index: {e}")
 
417
  if _faiss_rules_index and _faiss_rules_index.ntotal > 0:
418
  try:
419
  faiss.write_index(_faiss_rules_index, FAISS_RULES_PATH)
 
423
  def load_faiss_indices_from_disk():
424
  global _faiss_memory_index, _faiss_rules_index
425
  if not _initialized or not faiss: return
426
+ if os.path.exists(FAISS_MEMORY_PATH) and _faiss_memory_index:
 
427
  try:
428
  logger.info(f"Loading memory FAISS index from {FAISS_MEMORY_PATH}...")
429
  _faiss_memory_index = faiss.read_index(FAISS_MEMORY_PATH)
430
  logger.info(f"Memory FAISS index loaded ({_faiss_memory_index.ntotal} items).")
 
431
  if _faiss_memory_index.ntotal != len(_memory_items_list) and len(_memory_items_list) > 0:
432
  logger.warning(f"Memory FAISS index count ({_faiss_memory_index.ntotal}) differs from loaded texts ({len(_memory_items_list)}). Consider rebuilding FAISS.")
433
  except Exception as e: logger.error(f"Error loading memory FAISS index: {e}. Will use fresh index.")
 
434
  if os.path.exists(FAISS_RULES_PATH) and _faiss_rules_index:
435
  try:
436
  logger.info(f"Loading rules FAISS index from {FAISS_RULES_PATH}...")