# memory_logic.py
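"""Tiered semantic memory for an AI agent.

Maintains three FAISS indices: short-term memory (entries added this
session), long-term memory (loaded from a persistent backend), and learned
rules. The backend is selected via the STORAGE_BACKEND environment variable:
RAM (default), SQLITE, or HF_DATASET.
"""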
import os
import json
import time
from datetime import datetime
import logging
import re
import threading

try:
    from sentence_transformers import SentenceTransformer
    import faiss
    import numpy as np
except ImportError:
    SentenceTransformer, faiss, np = None, None, None
    logging.warning("SentenceTransformers, FAISS, or NumPy not installed. Semantic search will be unavailable.")

try:
    import sqlite3
except ImportError:
    sqlite3 = None
    logging.warning("sqlite3 module not available. SQLite backend will be unavailable.")

try:
    from datasets import load_dataset, Dataset
except ImportError:
    load_dataset, Dataset = None, None
    logging.warning("datasets library not installed. Hugging Face Dataset backend will be unavailable.")
logger = logging.getLogger(__name__)

# Quiet noisy third-party loggers.
for lib_name in ["sentence_transformers", "faiss", "datasets", "huggingface_hub"]:
    logging.getLogger(lib_name).setLevel(logging.WARNING)
# --- Configuration (environment variables) ---
STORAGE_BACKEND = os.getenv("STORAGE_BACKEND", "RAM").upper()
SQLITE_DB_PATH = os.getenv("SQLITE_DB_PATH", "app_data/ai_memory.db")
HF_TOKEN = os.getenv("HF_TOKEN")
HF_MEMORY_DATASET_REPO = os.getenv("HF_MEMORY_DATASET_REPO", "broadfield-dev/ai-brain")
HF_RULES_DATASET_REPO = os.getenv("HF_RULES_DATASET_REPO", "broadfield-dev/ai-rules")
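# Example configuration (illustrative values, not defaults): set these before
# importing this module. For the SQLite backend:
#   export STORAGE_BACKEND=SQLITE
#   export SQLITE_DB_PATH=app_data/ai_memory.db
# For the Hugging Face Dataset backend (the token needs write access to the
# memory and rules dataset repos):
#   export STORAGE_BACKEND=HF_DATASET
#   export HF_TOKEN=<your token>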
# --- Module state (populated by initialize_memory_system) ---
_embedder = None
_dimension = 384
_long_term_memory_items_list = []
_faiss_long_term_memory_index = None
_short_term_memory_items_list = []
_faiss_short_term_memory_index = None
_rules_items_list = []
_faiss_rules_index = None
_initialized = False
_init_lock = threading.Lock()
def _get_sqlite_connection():
    if not sqlite3:
        raise ImportError("sqlite3 module is required for SQLite backend but not found.")
    db_dir = os.path.dirname(SQLITE_DB_PATH)
    if db_dir and not os.path.exists(db_dir):
        os.makedirs(db_dir, exist_ok=True)
    conn = sqlite3.connect(SQLITE_DB_PATH, timeout=10)
    # Ensure the tables this module reads and writes exist. This is the minimal
    # schema implied by the queries used below: memory rows are JSON strings
    # ordered by creation time, and rule texts are unique.
    conn.execute("CREATE TABLE IF NOT EXISTS memories (memory_json TEXT NOT NULL, created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)")
    conn.execute("CREATE TABLE IF NOT EXISTS rules (rule_text TEXT UNIQUE NOT NULL)")
    conn.commit()
    return conn
def _build_faiss_index_from_json_strings(memory_items: list[str]) -> "faiss.Index":
    """Embed each memory JSON string and build a flat L2 FAISS index over them.

    Returns an empty index (never None) when there is nothing to embed or on
    failure. The return annotation is quoted so the module still imports when
    faiss is unavailable.
    """
    if not memory_items or not _embedder:
        return faiss.IndexFlatL2(_dimension)
    texts_to_embed = []
    for mem_json_str in memory_items:
        try:
            mem_obj = json.loads(mem_json_str)
            text = f"User: {mem_obj.get('user_input', '')}\nAI: {mem_obj.get('bot_response', '')}\nTakeaway: {mem_obj.get('metrics', {}).get('takeaway', 'N/A')}"
            texts_to_embed.append(text)
        except json.JSONDecodeError:
            continue
    if not texts_to_embed:
        return faiss.IndexFlatL2(_dimension)
    try:
        embeddings = _embedder.encode(texts_to_embed, convert_to_tensor=False, show_progress_bar=False)
        embeddings_np = np.array(embeddings, dtype=np.float32)
        if embeddings_np.ndim == 2 and embeddings_np.shape[1] == _dimension:
            index = faiss.IndexFlatL2(_dimension)
            index.add(embeddings_np)
            return index
        logger.error(f"Error building FAISS index: embedding shape mismatch (got {embeddings_np.shape}, expected (*, {_dimension})).")
        return faiss.IndexFlatL2(_dimension)
    except Exception as e:
        logger.error(f"Failed to build FAISS index: {e}", exc_info=True)
        return faiss.IndexFlatL2(_dimension)
def initialize_memory_system():
    """Load the embedder, long-term memories, and rules from the configured backend, then build the FAISS indices."""
    global _initialized, _embedder, _dimension
    global _long_term_memory_items_list, _faiss_long_term_memory_index
    global _short_term_memory_items_list, _faiss_short_term_memory_index
    global _rules_items_list, _faiss_rules_index
    with _init_lock:
        if _initialized:
            return
        logger.info(f"Initializing memory system with backend: {STORAGE_BACKEND}")
        init_start_time = time.time()
        if not all([SentenceTransformer, faiss, np]):
            logger.error("Core RAG libraries not available. Cannot initialize semantic memory.")
            return
        if not _embedder:
            try:
                _embedder = SentenceTransformer('all-MiniLM-L6-v2', cache_folder="./sentence_transformer_cache")
                _dimension = _embedder.get_sentence_embedding_dimension() or 384
            except Exception as e:
                logger.critical(f"FATAL: Error loading SentenceTransformer: {e}", exc_info=True)
                return
        # Load long-term memories from the persistent backend, if any.
        long_term_mems = []
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            try:
                with _get_sqlite_connection() as conn:
                    long_term_mems = [row[0] for row in conn.execute("SELECT memory_json FROM memories ORDER BY created_at ASC")]
            except Exception as e:
                logger.error(f"Error loading long-term memories from SQLite: {e}")
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            try:
                dataset = load_dataset(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
                if "train" in dataset and "memory_json" in dataset["train"].column_names:
                    long_term_mems = [m for m in dataset["train"]["memory_json"] if isinstance(m, str)]
            except Exception as e:
                logger.error(f"Error loading long-term memories from HF Dataset: {e}")
        _long_term_memory_items_list = long_term_mems
        logger.info(f"Loaded {len(_long_term_memory_items_list)} long-term memory items.")
        _faiss_long_term_memory_index = _build_faiss_index_from_json_strings(_long_term_memory_items_list)
        logger.info(f"Long-term memory FAISS index built. Total items: {_faiss_long_term_memory_index.ntotal if _faiss_long_term_memory_index else 'N/A'}")
        # Short-term memory always starts empty; it only holds entries added this session.
        _short_term_memory_items_list = []
        _faiss_short_term_memory_index = faiss.IndexFlatL2(_dimension)
        logger.info("Short-term memory initialized (empty).")
        temp_rules_text = []
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            try:
                with _get_sqlite_connection() as conn:
                    temp_rules_text = [row[0] for row in conn.execute("SELECT rule_text FROM rules")]
            except Exception as e:
                logger.error(f"Error loading rules from SQLite: {e}")
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            try:
                dataset = load_dataset(HF_RULES_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
                if "train" in dataset and "rule_text" in dataset["train"].column_names:
                    temp_rules_text = [r for r in dataset["train"]["rule_text"] if isinstance(r, str) and r.strip()]
            except Exception as e:
                logger.error(f"Error loading rules from HF Dataset: {e}")
        _rules_items_list = sorted(set(temp_rules_text))
        _faiss_rules_index = faiss.IndexFlatL2(_dimension)
        if _rules_items_list:
            rule_embeddings = _embedder.encode(_rules_items_list, convert_to_tensor=False)
            _faiss_rules_index.add(np.array(rule_embeddings, dtype=np.float32))
        logger.info(f"Rules FAISS index built. Total items: {_faiss_rules_index.ntotal if _faiss_rules_index else 'N/A'}")
        _initialized = True
        logger.info(f"Memory system initialization complete in {time.time() - init_start_time:.2f}s")
def add_memory_entry(user_input: str, metrics: dict, bot_response: str) -> tuple[bool, str]:
    """Store one interaction in short-term memory and persist it to the configured backend."""
    if not _initialized:
        initialize_memory_system()
    if not _embedder:
        return False, "Embedder not initialized."
    memory_obj = {"user_input": user_input, "metrics": metrics, "bot_response": bot_response, "timestamp": datetime.utcnow().isoformat()}
    memory_json_str = json.dumps(memory_obj)
    text_to_embed = f"User: {user_input}\nAI: {bot_response}\nTakeaway: {metrics.get('takeaway', 'N/A')}"
    try:
        embedding = _embedder.encode([text_to_embed], convert_to_tensor=False)
        embedding_np = np.array(embedding, dtype=np.float32).reshape(1, -1)
        _faiss_short_term_memory_index.add(embedding_np)
        _short_term_memory_items_list.append(memory_json_str)
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn:
                conn.execute("INSERT INTO memories (memory_json) VALUES (?)", (memory_json_str,))
                conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            all_mems_for_push = _long_term_memory_items_list + _short_term_memory_items_list
            Dataset.from_dict({"memory_json": list(set(all_mems_for_push))}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)
        logger.info(f"Added memory. Short-term count: {_faiss_short_term_memory_index.ntotal}")
        return True, "Memory added successfully."
    except Exception as e:
        logger.error(f"Error adding memory entry: {e}", exc_info=True)
        return False, f"Error adding memory: {e}"
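# Usage sketch (hypothetical values): `metrics` is free-form, but its optional
# "takeaway" key is folded into the embedded text and so influences retrieval.
#   ok, msg = add_memory_entry(
#       user_input="How do I reset my password?",
#       metrics={"takeaway": "User needs account-recovery help"},
#       bot_response="Use the 'Forgot password' link on the login page.",
#   )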
def search_memories(query: str, k: int = 3, threshold: float = 1.0) -> tuple[list[dict], str]:
    """Tiered semantic search: check short-term memory first, escalating to long-term only when no hit is close enough."""
    if not _initialized:
        initialize_memory_system()
    if not _embedder:
        return [], "uninitialized"
    query_embedding = np.array(_embedder.encode([query]), dtype=np.float32)
    final_results = {}
    search_path = "short"
    if _faiss_short_term_memory_index and _faiss_short_term_memory_index.ntotal > 0:
        distances, indices = _faiss_short_term_memory_index.search(query_embedding, min(k, _faiss_short_term_memory_index.ntotal))
        best_dist = distances[0][0] if len(distances[0]) > 0 else float('inf')
        if best_dist < threshold:
            logger.info(f"Found relevant short-term memories (best distance: {best_dist:.4f}).")
            for i in indices[0]:
                res = json.loads(_short_term_memory_items_list[i])
                final_results[res['timestamp']] = res
            return list(final_results.values()), search_path
    logger.info("No relevant short-term memories found. Escalating to deep search on long-term memory.")
    search_path = "deep"
    if _faiss_long_term_memory_index and _faiss_long_term_memory_index.ntotal > 0:
        distances, indices = _faiss_long_term_memory_index.search(query_embedding, min(k, _faiss_long_term_memory_index.ntotal))
        for i in indices[0]:
            res = json.loads(_long_term_memory_items_list[i])
            final_results[res['timestamp']] = res
    # Return outside the branch so callers always get a list, never None,
    # even when the long-term index is empty.
    return list(final_results.values()), search_path
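# Retrieval sketch: short-term hits within `threshold` (FAISS IndexFlatL2
# reports squared L2 distances) short-circuit the search; otherwise the
# long-term index is consulted.
#   results, path = search_memories("password reset", k=3)
#   # `path` is "short" or "deep", indicating which tier answered.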
def retrieve_rules_semantic(query: str, k: int = 5) -> list[str]:
    """Return up to k rules most semantically similar to the query."""
    if not _initialized:
        initialize_memory_system()
    if not _faiss_rules_index or _faiss_rules_index.ntotal == 0:
        return []
    try:
        q_embedding = np.array(_embedder.encode([query]), dtype=np.float32)
        _, indices = _faiss_rules_index.search(q_embedding, min(k, _faiss_rules_index.ntotal))
        return [_rules_items_list[i] for i in indices[0]]
    except Exception as e:
        logger.error(f"Error retrieving rules: {e}", exc_info=True)
        return []
def get_all_memories_cached() -> list[dict]:
    """Return all cached memories, newest first, de-duplicated by timestamp."""
    if not _initialized:
        initialize_memory_system()
    all_mems = _long_term_memory_items_list + _short_term_memory_items_list
    seen_ts = set()
    unique_mem_dicts = []
    for mem_json_str in reversed(all_mems):
        try:
            mem_dict = json.loads(mem_json_str)
            if mem_dict['timestamp'] not in seen_ts:
                unique_mem_dicts.append(mem_dict)
                seen_ts.add(mem_dict['timestamp'])
        except (json.JSONDecodeError, KeyError):
            continue
    return unique_mem_dicts
# --- The rest of the utility functions (add_rule, get_rules, clear functions) remain the same ---
def add_rule_entry(rule_text: str) -> tuple[bool, str]:
    """Validate, embed, and store a new rule, persisting it to the configured backend."""
    global _rules_items_list, _faiss_rules_index
    if not _initialized:
        initialize_memory_system()
    if not _embedder:
        return False, "Embedder not initialized."
    rule_text = rule_text.strip()
    if not rule_text or rule_text in _rules_items_list:
        return False, "duplicate or empty"
    if not re.match(r"\[(CORE_RULE|RESPONSE_PRINCIPLE|BEHAVIORAL_ADJUSTMENT|GENERAL_LEARNING)\|([\d\.]+?)\](.*)", rule_text, re.I | re.DOTALL):
        return False, "Invalid rule format."
    try:
        embedding = _embedder.encode([rule_text], convert_to_tensor=False)
        _faiss_rules_index.add(np.array(embedding, dtype=np.float32))
        _rules_items_list.append(rule_text)
        _rules_items_list.sort()
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn:
                conn.execute("INSERT OR IGNORE INTO rules (rule_text) VALUES (?)", (rule_text,))
                conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
        return True, "Rule added"
    except Exception as e:
        logger.error(f"Error adding rule: {e}", exc_info=True)
        return False, str(e)
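# Rule format sketch: rules must match "[TYPE|score]text", where TYPE is one of
# CORE_RULE, RESPONSE_PRINCIPLE, BEHAVIORAL_ADJUSTMENT, or GENERAL_LEARNING
# (case-insensitive, per the regex above). Illustrative call:
#   ok, msg = add_rule_entry("[CORE_RULE|1.0] Cite sources when stating facts.")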
def get_all_rules_cached() -> list[str]:
    """Return a copy of the cached, sorted rule list."""
    if not _initialized:
        initialize_memory_system()
    return list(_rules_items_list)
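if __name__ == "__main__":
    # Minimal smoke test (a sketch, assuming the default RAM backend and that
    # sentence-transformers, faiss, and numpy are installed).
    logging.basicConfig(level=logging.INFO)
    initialize_memory_system()
    add_memory_entry(
        user_input="What storage backends are supported?",
        metrics={"takeaway": "Backends: RAM, SQLITE, HF_DATASET"},
        bot_response="RAM, SQLite, or a Hugging Face Dataset, selected via STORAGE_BACKEND.",
    )
    results, path = search_memories("supported storage backends", k=1)
    print(f"search path={path}, results={results}")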