# memory_logic.py
import os
import json
import time
from datetime import datetime
import logging
import re
import threading

# Conditionally import heavy dependencies
try:
    from sentence_transformers import SentenceTransformer
    import faiss
    import numpy as np
except ImportError:
    SentenceTransformer, faiss, np = None, None, None
    logging.warning("SentenceTransformers, FAISS, or NumPy not installed. Semantic search will be unavailable.")
try:
    import sqlite3
except ImportError:
    sqlite3 = None
    logging.warning("sqlite3 module not available. SQLite backend will be unavailable.")
try:
    from datasets import load_dataset, Dataset
except ImportError:
    load_dataset, Dataset = None, None
    logging.warning("datasets library not installed. Hugging Face Dataset backend will be unavailable.")

logger = logging.getLogger(__name__)

# Suppress verbose logs from dependencies
for lib_name in ["sentence_transformers", "faiss", "datasets", "huggingface_hub"]:
    logging.getLogger(lib_name).setLevel(logging.WARNING)

# --- Configuration (read directly from environment variables) ---
STORAGE_BACKEND = os.getenv("STORAGE_BACKEND", "HF_DATASET").upper()  # HF_DATASET, RAM, SQLITE
SQLITE_DB_PATH = os.getenv("SQLITE_DB_PATH", "app_data/ai_memory.db")
HF_TOKEN = os.getenv("HF_TOKEN")
HF_MEMORY_DATASET_REPO = os.getenv("HF_MEMORY_DATASET_REPO", "broadfield-dev/ai-brain")  # Example
HF_RULES_DATASET_REPO = os.getenv("HF_RULES_DATASET_REPO", "broadfield-dev/ai-rules")  # Example

# --- Globals for RAG within this module ---
_embedder = None
_dimension = 384  # Default; updated once the embedder is loaded
_faiss_memory_index = None
_memory_items_list = []  # JSON strings of memory objects (RAM copy, mirrored from DB/HF)
_faiss_rules_index = None
_rules_items_list = []  # Rule text strings
_initialized = False
_init_lock = threading.Lock()


# --- Helper: SQLite connection ---
def _get_sqlite_connection():
    if not sqlite3:
        raise ImportError("sqlite3 module is required for SQLite backend but not found.")
    db_dir = os.path.dirname(SQLITE_DB_PATH)
    if db_dir and not os.path.exists(db_dir):
        os.makedirs(db_dir, exist_ok=True)
    return sqlite3.connect(SQLITE_DB_PATH, timeout=10)


def _init_sqlite_tables():
    if STORAGE_BACKEND != "SQLITE" or not sqlite3:
        return
    try:
        with _get_sqlite_connection() as conn:
            cursor = conn.cursor()
            # Stores the JSON string of the memory object.
            # Note: SQL comments use "--"; Python-style "#" comments are a syntax error in SQLite DDL.
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS memories (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    memory_json TEXT NOT NULL,
                    -- Optionally add an embedding column here if not using a separate FAISS index:
                    -- embedding BLOB,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)
            # Stores the rule text directly
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS rules (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    rule_text TEXT NOT NULL UNIQUE,
                    -- embedding BLOB,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)
            conn.commit()
        logger.info("SQLite tables for memories and rules checked/created.")
    except Exception as e:
        logger.error(f"SQLite table initialization error: {e}", exc_info=True)
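
# Illustrative configuration sketch (an addition, not part of the original module):
# the env vars below mirror exactly the ones read in the Configuration block above
# and must be set before this module is imported, e.g. in a POSIX shell:
#
#   export STORAGE_BACKEND=SQLITE            # or RAM / HF_DATASET
#   export SQLITE_DB_PATH=app_data/ai_memory.db
#   export HF_TOKEN=...                      # required only for the HF_DATASET backend
#   export HF_MEMORY_DATASET_REPO=user/ai-brain
#   export HF_RULES_DATASET_REPO=user/ai-rules
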
# --- Initialization ---
def initialize_memory_system():
    global _initialized, _embedder, _dimension, _faiss_memory_index, _memory_items_list, \
        _faiss_rules_index, _rules_items_list
    with _init_lock:
        if _initialized:
            logger.info("Memory system already initialized.")
            return
        logger.info(f"Initializing memory system with backend: {STORAGE_BACKEND}")
        init_start_time = time.time()

        # 1. Load the SentenceTransformer model (always needed for semantic operations)
        if not SentenceTransformer or not faiss or not np:
            logger.error("Core RAG libraries (SentenceTransformers, FAISS, NumPy) not available. Cannot initialize semantic memory.")
            _initialized = False  # Mark as not properly initialized
            return
        if not _embedder:
            try:
                logger.info("Loading SentenceTransformer model (all-MiniLM-L6-v2)...")
                _embedder = SentenceTransformer('all-MiniLM-L6-v2', cache_folder="./sentence_transformer_cache")
                _dimension = _embedder.get_sentence_embedding_dimension() or 384
                logger.info(f"SentenceTransformer loaded. Dimension: {_dimension}")
            except Exception as e:
                logger.critical(f"FATAL: Error loading SentenceTransformer: {e}", exc_info=True)
                _initialized = False
                return  # Cannot proceed without the embedder

        # 2. Initialize SQLite if used
        if STORAGE_BACKEND == "SQLITE":
            _init_sqlite_tables()

        # 3. Load memories
        logger.info("Loading memories...")
        temp_memories_json = []
        if STORAGE_BACKEND == "RAM":
            _memory_items_list = []  # Start fresh for the RAM backend
        elif STORAGE_BACKEND == "SQLITE" and sqlite3:
            try:
                with _get_sqlite_connection() as conn:
                    temp_memories_json = [row[0] for row in conn.execute("SELECT memory_json FROM memories ORDER BY created_at ASC")]
            except Exception as e:
                logger.error(f"Error loading memories from SQLite: {e}")
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset and load_dataset:
            try:
                logger.info(f"Attempting to load memories from HF Dataset: {HF_MEMORY_DATASET_REPO}")
                dataset = load_dataset(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)  # Add download_mode if needed
                if "train" in dataset and "memory_json" in dataset["train"].column_names:  # Assuming a 'memory_json' column
                    temp_memories_json = [m_json for m_json in dataset["train"]["memory_json"] if isinstance(m_json, str)]
                else:
                    logger.warning(f"HF Dataset {HF_MEMORY_DATASET_REPO} for memories not found or 'memory_json' column missing.")
            except Exception as e:
                logger.error(f"Error loading memories from HF Dataset ({HF_MEMORY_DATASET_REPO}): {e}")
        _memory_items_list = temp_memories_json
        logger.info(f"Loaded {len(_memory_items_list)} memory items from {STORAGE_BACKEND}.")

        # 4. Build the FAISS memory index
        _faiss_memory_index = faiss.IndexFlatL2(_dimension)
        if _memory_items_list:
            logger.info(f"Building FAISS index for {len(_memory_items_list)} memories...")
            # Extract the text to embed from each memory JSON object
            texts_to_embed_mem = []
            valid_memories_json = []
            for mem_json_str in _memory_items_list:
                try:
                    mem_obj = json.loads(mem_json_str)
                    # Consistent embedding strategy: user input + bot response + takeaway
                    text = f"User: {mem_obj.get('user_input', '')}\nAI: {mem_obj.get('bot_response', '')}\nTakeaway: {mem_obj.get('metrics', {}).get('takeaway', 'N/A')}"
                    texts_to_embed_mem.append(text)
                    valid_memories_json.append(mem_json_str)
                except json.JSONDecodeError:
                    logger.warning(f"Skipping malformed memory JSON for FAISS indexing: {mem_json_str[:100]}")
            # Drop malformed entries from the list as well, so FAISS row i maps to item i
            _memory_items_list = valid_memories_json
            if texts_to_embed_mem:
                try:
                    embeddings = _embedder.encode(texts_to_embed_mem, convert_to_tensor=False, show_progress_bar=False)
                    embeddings_np = np.array(embeddings, dtype=np.float32)
                    if embeddings_np.ndim == 2 and embeddings_np.shape[0] == len(texts_to_embed_mem) and embeddings_np.shape[1] == _dimension:
                        _faiss_memory_index.add(embeddings_np)
                    else:
                        logger.error(f"Memory embeddings shape error. Expected ({len(texts_to_embed_mem)}, {_dimension}), got {embeddings_np.shape if hasattr(embeddings_np, 'shape') else 'N/A'}")
                except Exception as e_faiss_mem:
                    logger.error(f"Error building FAISS memory index: {e_faiss_mem}")
        logger.info(f"FAISS memory index built. Total items: {_faiss_memory_index.ntotal if _faiss_memory_index else 'N/A'}")
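
        # Note (added for clarity): IndexFlatL2 is an exact, brute-force L2 index, and
        # row i of the index corresponds positionally to _memory_items_list[i]; the
        # retrieval functions below depend on that alignment.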

        # 5. Load rules
        logger.info("Loading rules...")
        temp_rules_text = []
        if STORAGE_BACKEND == "RAM":
            _rules_items_list = []
        elif STORAGE_BACKEND == "SQLITE" and sqlite3:
            try:
                with _get_sqlite_connection() as conn:
                    temp_rules_text = [row[0] for row in conn.execute("SELECT rule_text FROM rules ORDER BY created_at ASC")]
            except Exception as e:
                logger.error(f"Error loading rules from SQLite: {e}")
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset and load_dataset:
            try:
                logger.info(f"Attempting to load rules from HF Dataset: {HF_RULES_DATASET_REPO}")
                dataset = load_dataset(HF_RULES_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
                if "train" in dataset and "rule_text" in dataset["train"].column_names:
                    temp_rules_text = [r_text for r_text in dataset["train"]["rule_text"] if isinstance(r_text, str) and r_text.strip()]
                else:
                    logger.warning(f"HF Dataset {HF_RULES_DATASET_REPO} for rules not found or 'rule_text' column missing.")
            except Exception as e:
                logger.error(f"Error loading rules from HF Dataset ({HF_RULES_DATASET_REPO}): {e}")
        _rules_items_list = sorted(set(temp_rules_text))  # Ensure unique and sorted
        logger.info(f"Loaded {len(_rules_items_list)} rule items from {STORAGE_BACKEND}.")

        # 6. Build the FAISS rules index
        _faiss_rules_index = faiss.IndexFlatL2(_dimension)
        if _rules_items_list:
            logger.info(f"Building FAISS index for {len(_rules_items_list)} rules...")
            try:
                embeddings = _embedder.encode(_rules_items_list, convert_to_tensor=False, show_progress_bar=False)
                embeddings_np = np.array(embeddings, dtype=np.float32)
                if embeddings_np.ndim == 2 and embeddings_np.shape[0] == len(_rules_items_list) and embeddings_np.shape[1] == _dimension:
                    _faiss_rules_index.add(embeddings_np)
                else:
                    logger.error(f"Rule embeddings shape error. Expected ({len(_rules_items_list)}, {_dimension}), got {embeddings_np.shape if hasattr(embeddings_np, 'shape') else 'N/A'}")
            except Exception as e_faiss_rule:
                logger.error(f"Error building FAISS rule index: {e_faiss_rule}")
        logger.info(f"FAISS rules index built. Total items: {_faiss_rules_index.ntotal if _faiss_rules_index else 'N/A'}")

        _initialized = True
        logger.info(f"Memory system initialization complete in {time.time() - init_start_time:.2f}s")


# --- Memory Operations (Semantic) ---
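# Each memory entry is a JSON string with the shape produced by add_memory_entry()
# below: {"user_input": str, "metrics": dict, "bot_response": str, "timestamp": ISO-8601}.
# The embedding strategy expects the "metrics" dict to carry a "takeaway" key.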
def add_memory_entry(user_input: str, metrics: dict, bot_response: str) -> tuple[bool, str]:
    """Adds a memory entry to the configured backend and the FAISS index."""
    global _memory_items_list, _faiss_memory_index
    if not _initialized:
        initialize_memory_system()
    if not _embedder or not _faiss_memory_index:
        return False, "Memory system or embedder not initialized for adding memory."

    memory_obj = {
        "user_input": user_input,
        "metrics": metrics,
        "bot_response": bot_response,
        "timestamp": datetime.utcnow().isoformat(),
    }
    memory_json_str = json.dumps(memory_obj)
    text_to_embed = f"User: {user_input}\nAI: {bot_response}\nTakeaway: {metrics.get('takeaway', 'N/A')}"
    try:
        embedding = _embedder.encode([text_to_embed], convert_to_tensor=False)
        embedding_np = np.array(embedding, dtype=np.float32).reshape(1, -1)
        if embedding_np.shape != (1, _dimension):
            logger.error(f"Memory embedding shape error: {embedding_np.shape}. Expected (1, {_dimension})")
            return False, "Embedding shape error."

        # Add to FAISS
        _faiss_memory_index.add(embedding_np)
        # Add to the in-memory list
        _memory_items_list.append(memory_json_str)

        # Add to persistent storage
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn:
                conn.execute("INSERT INTO memories (memory_json) VALUES (?)", (memory_json_str,))
                conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            # This re-uploads the full dataset and can be slow; consider batching or an async push.
            logger.info(f"Pushing {len(_memory_items_list)} memories to HF Hub: {HF_MEMORY_DATASET_REPO}")
            Dataset.from_dict({"memory_json": list(_memory_items_list)}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)  # Adjust 'private' as needed

        logger.info(f"Added memory. RAM: {len(_memory_items_list)}, FAISS: {_faiss_memory_index.ntotal}")
        return True, "Memory added successfully."
    except Exception as e:
        logger.error(f"Error adding memory entry: {e}", exc_info=True)
        # TODO: rollback logic if the FAISS add succeeded but the backend write failed (complex)
        return False, f"Error adding memory: {e}"


def retrieve_memories_semantic(query: str, k: int = 3) -> list[dict]:
    """Retrieves the k most relevant memories using semantic search."""
    if not _initialized:
        initialize_memory_system()
    if not _embedder or not _faiss_memory_index or _faiss_memory_index.ntotal == 0:
        logger.debug("Cannot retrieve memories: embedder or FAISS index not ready, or index is empty.")
        return []
    try:
        query_embedding = _embedder.encode([query], convert_to_tensor=False)
        query_embedding_np = np.array(query_embedding, dtype=np.float32).reshape(1, -1)
        if query_embedding_np.shape[1] != _dimension:
            logger.error(f"Query embedding dimension mismatch. Expected {_dimension}, got {query_embedding_np.shape[1]}")
            return []
        distances, indices = _faiss_memory_index.search(query_embedding_np, min(k, _faiss_memory_index.ntotal))
        results = []
        for i in indices[0]:
            if 0 <= i < len(_memory_items_list):
                try:
                    results.append(json.loads(_memory_items_list[i]))
                except json.JSONDecodeError:
                    logger.warning(f"Could not parse memory JSON from list at index {i}")
            else:
                logger.warning(f"FAISS index {i} out of bounds for memory_items_list (len: {len(_memory_items_list)})")
        logger.debug(f"Retrieved {len(results)} memories semantically for query: '{query[:50]}...'")
        return results
    except Exception as e:
        logger.error(f"Error retrieving memories semantically: {e}", exc_info=True)
        return []


# --- Rule (Insight) Operations (Semantic) ---
def add_rule_entry(rule_text: str) -> tuple[bool, str]:
    """Adds a rule if valid and not a duplicate. Updates the backend and FAISS."""
    global _rules_items_list, _faiss_rules_index
    if not _initialized:
        initialize_memory_system()
    if not _embedder or not _faiss_rules_index:
        return False, "Rule system or embedder not initialized."
    rule_text = rule_text.strip()
    if not rule_text:
        return False, "Rule text cannot be empty."
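    # Expected format (enforced by the regex below): "[TYPE|score] text", where TYPE is
    # one of CORE_RULE, RESPONSE_PRINCIPLE, BEHAVIORAL_ADJUSTMENT, or GENERAL_LEARNING.
    # A hypothetical example: "[CORE_RULE|1.0] Always answer in the user's language."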
if not re.match(r"\[(CORE_RULE|RESPONSE_PRINCIPLE|BEHAVIORAL_ADJUSTMENT|GENERAL_LEARNING)\|([\d\.]+?)\](.*)", rule_text, re.I|re.DOTALL): return False, "Invalid rule format." if rule_text in _rules_items_list: return False, "duplicate" try: embedding = _embedder.encode([rule_text], convert_to_tensor=False) embedding_np = np.array(embedding, dtype=np.float32).reshape(1, -1) if embedding_np.shape != (1, _dimension): return False, "Rule embedding shape error." _faiss_rules_index.add(embedding_np) _rules_items_list.append(rule_text) _rules_items_list.sort() if STORAGE_BACKEND == "SQLITE" and sqlite3: with _get_sqlite_connection() as conn: conn.execute("INSERT OR IGNORE INTO rules (rule_text) VALUES (?)", (rule_text,)) conn.commit() elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset: logger.info(f"Pushing {len(_rules_items_list)} rules to HF Hub: {HF_RULES_DATASET_REPO}") Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True) logger.info(f"Added rule. RAM: {len(_rules_items_list)}, FAISS: {_faiss_rules_index.ntotal}") return True, "Rule added successfully." except Exception as e: logger.error(f"Error adding rule entry: {e}", exc_info=True) # Basic rollback if FAISS add succeeded if rule_text in _rules_items_list and _faiss_rules_index.ntotal > 0: # Crude check # A full rollback would involve rebuilding FAISS index from _rules_items_list before append. # For simplicity, this is omitted here. State could be inconsistent on error. pass return False, f"Error adding rule: {e}" def retrieve_rules_semantic(query: str, k: int = 5) -> list[str]: """Retrieves k most relevant rules using semantic search.""" if not _initialized: initialize_memory_system() if not _embedder or not _faiss_rules_index or _faiss_rules_index.ntotal == 0: return [] try: query_embedding = _embedder.encode([query], convert_to_tensor=False) query_embedding_np = np.array(query_embedding, dtype=np.float32).reshape(1, -1) if query_embedding_np.shape[1] != _dimension: return [] distances, indices = _faiss_rules_index.search(query_embedding_np, min(k, _faiss_rules_index.ntotal)) results = [_rules_items_list[i] for i in indices[0] if 0 <= i < len(_rules_items_list)] logger.debug(f"Retrieved {len(results)} rules semantically for query: '{query[:50]}...'") return results except Exception as e: logger.error(f"Error retrieving rules semantically: {e}", exc_info=True) return [] def remove_rule_entry(rule_text_to_delete: str) -> bool: """Removes a rule from backend and rebuilds FAISS for rules.""" global _rules_items_list, _faiss_rules_index if not _initialized: initialize_memory_system() if not _embedder or not _faiss_rules_index: return False rule_text_to_delete = rule_text_to_delete.strip() if rule_text_to_delete not in _rules_items_list: return False # Not found try: _rules_items_list.remove(rule_text_to_delete) _rules_items_list.sort() # Maintain sorted order # Rebuild FAISS index for rules (simplest way to ensure consistency after removal) new_faiss_rules_index = faiss.IndexFlatL2(_dimension) if _rules_items_list: embeddings = _embedder.encode(_rules_items_list, convert_to_tensor=False) embeddings_np = np.array(embeddings, dtype=np.float32) if embeddings_np.ndim == 2 and embeddings_np.shape[0] == len(_rules_items_list) and embeddings_np.shape[1] == _dimension: new_faiss_rules_index.add(embeddings_np) else: # Should not happen if list is consistent logger.error("Error rebuilding FAISS for rules after removal: Embedding shape error. 
        # Remove from persistent storage
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn:
                conn.execute("DELETE FROM rules WHERE rule_text = ?", (rule_text_to_delete,))
                conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
        logger.info(f"Removed rule. RAM: {len(_rules_items_list)}, FAISS: {_faiss_rules_index.ntotal}")
        return True
    except Exception as e:
        logger.error(f"Error removing rule entry: {e}", exc_info=True)
        # Partial failure is possible here; state might be inconsistent.
        return False


# --- Utility functions to get all data (for UI display, etc.) ---
def get_all_rules_cached() -> list[str]:
    if not _initialized:
        initialize_memory_system()
    return list(_rules_items_list)


def get_all_memories_cached() -> list[dict]:
    if not _initialized:
        initialize_memory_system()
    # Convert JSON strings to dicts for easier use by the UI
    mem_dicts = []
    for mem_json_str in _memory_items_list:
        try:
            mem_dicts.append(json.loads(mem_json_str))
        except json.JSONDecodeError:
            pass  # Ignore parse errors for display
    return mem_dicts


def clear_all_memory_data_backend() -> bool:
    """Clears all memories from the backend and resets the in-memory FAISS index/list."""
    global _memory_items_list, _faiss_memory_index
    if not _initialized:
        initialize_memory_system()
    success = True
    try:
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn:
                conn.execute("DELETE FROM memories")
                conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            # "Deleting" from the HF Hub here means pushing an empty dataset
            Dataset.from_dict({"memory_json": []}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)
        _memory_items_list = []
        if _faiss_memory_index:
            _faiss_memory_index.reset()  # Clear the FAISS index
        logger.info("All memories cleared from backend and in-memory stores.")
    except Exception as e:
        logger.error(f"Error clearing all memory data: {e}")
        success = False
    return success


def clear_all_rules_data_backend() -> bool:
    """Clears all rules from the backend and resets the in-memory FAISS index/list."""
    global _rules_items_list, _faiss_rules_index
    if not _initialized:
        initialize_memory_system()
    success = True
    try:
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn:
                conn.execute("DELETE FROM rules")
                conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            Dataset.from_dict({"rule_text": []}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
        _rules_items_list = []
        if _faiss_rules_index:
            _faiss_rules_index.reset()
        logger.info("All rules cleared from backend and in-memory stores.")
    except Exception as e:
        logger.error(f"Error clearing all rules data: {e}")
        success = False
    return success


# Optional: persist FAISS indices to disk (useful between app runs with the RAM backend)
FAISS_MEMORY_PATH = os.path.join(os.getenv("FAISS_STORAGE_PATH", "app_data/faiss_indices"), "memory_index.faiss")
FAISS_RULES_PATH = os.path.join(os.getenv("FAISS_STORAGE_PATH", "app_data/faiss_indices"), "rules_index.faiss")

def save_faiss_indices_to_disk():
    if not _initialized or not faiss:
        return
    faiss_dir = os.path.dirname(FAISS_MEMORY_PATH)
    if not os.path.exists(faiss_dir):
        os.makedirs(faiss_dir, exist_ok=True)
    if _faiss_memory_index and _faiss_memory_index.ntotal > 0:
        try:
            faiss.write_index(_faiss_memory_index, FAISS_MEMORY_PATH)
            logger.info(f"Memory FAISS index saved to disk ({_faiss_memory_index.ntotal} items).")
        except Exception as e:
            logger.error(f"Error saving memory FAISS index: {e}")
    if _faiss_rules_index and _faiss_rules_index.ntotal > 0:
        try:
            faiss.write_index(_faiss_rules_index, FAISS_RULES_PATH)
            logger.info(f"Rules FAISS index saved to disk ({_faiss_rules_index.ntotal} items).")
        except Exception as e:
            logger.error(f"Error saving rules FAISS index: {e}")


def load_faiss_indices_from_disk():
    global _faiss_memory_index, _faiss_rules_index
    if not _initialized or not faiss:
        return
    if os.path.exists(FAISS_MEMORY_PATH) and _faiss_memory_index:  # Only replace an existing index object
        try:
            logger.info(f"Loading memory FAISS index from {FAISS_MEMORY_PATH}...")
            _faiss_memory_index = faiss.read_index(FAISS_MEMORY_PATH)
            logger.info(f"Memory FAISS index loaded ({_faiss_memory_index.ntotal} items).")
            # Consistency check: FAISS ntotal vs. len(_memory_items_list)
            if _faiss_memory_index.ntotal != len(_memory_items_list) and len(_memory_items_list) > 0:
                logger.warning(f"Memory FAISS index count ({_faiss_memory_index.ntotal}) differs from loaded texts ({len(_memory_items_list)}). Consider rebuilding FAISS.")
        except Exception as e:
            logger.error(f"Error loading memory FAISS index: {e}. Will use fresh index.")
    if os.path.exists(FAISS_RULES_PATH) and _faiss_rules_index:
        try:
            logger.info(f"Loading rules FAISS index from {FAISS_RULES_PATH}...")
            _faiss_rules_index = faiss.read_index(FAISS_RULES_PATH)
            logger.info(f"Rules FAISS index loaded ({_faiss_rules_index.ntotal} items).")
            if _faiss_rules_index.ntotal != len(_rules_items_list) and len(_rules_items_list) > 0:
                logger.warning(f"Rules FAISS index count ({_faiss_rules_index.ntotal}) differs from loaded texts ({len(_rules_items_list)}). Consider rebuilding FAISS.")
        except Exception as e:
            logger.error(f"Error loading rules FAISS index: {e}. Will use fresh index.")
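

if __name__ == "__main__":
    # Minimal usage sketch, added for illustration (not part of the module's API).
    # Assumes sentence-transformers, faiss, and numpy are installed; best run with
    # STORAGE_BACKEND=RAM so no SQLite file or HF Hub repo is touched.
    logging.basicConfig(level=logging.INFO)
    initialize_memory_system()
    ok, msg = add_memory_entry(
        user_input="How do I persist the FAISS index between runs?",
        metrics={"takeaway": "User asked about index persistence."},
        bot_response="Call save_faiss_indices_to_disk() after updates.",
    )
    print(f"add_memory_entry -> {ok}: {msg}")
    for mem in retrieve_memories_semantic("persisting the index", k=1):
        print("retrieved:", mem.get("bot_response"))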