broadfield-dev committed on
Commit 14149fd · verified · 1 Parent(s): 750febc

Update model_logic.py

Files changed (1)
  1. model_logic.py +73 -311
model_logic.py CHANGED
@@ -1,322 +1,84 @@
+# model_logic.py
 import os
+import requests
 import json
-import time
-from datetime import datetime
 import logging
-import re
-import threading
-
-try:
-    from sentence_transformers import SentenceTransformer
-    import faiss
-    import numpy as np
-except ImportError:
-    SentenceTransformer, faiss, np = None, None, None
-    logging.warning("SentenceTransformers, FAISS, or NumPy not installed. Semantic search will be unavailable.")
-
-try:
-    import sqlite3
-except ImportError:
-    sqlite3 = None
-    logging.warning("sqlite3 module not available. SQLite backend will be unavailable.")
-
-try:
-    from datasets import load_dataset, Dataset
-except ImportError:
-    load_dataset, Dataset = None, None
-    logging.warning("datasets library not installed. Hugging Face Dataset backend will be unavailable.")
-
+from dotenv import load_dotenv
+
+load_dotenv()
+
+logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
 logger = logging.getLogger(__name__)
-for lib_name in ["sentence_transformers", "faiss", "datasets", "huggingface_hub"]:
-    if logging.getLogger(lib_name): logging.getLogger(lib_name).setLevel(logging.WARNING)
-
-STORAGE_BACKEND = os.getenv("STORAGE_BACKEND", "HF_DATASET").upper()
-SQLITE_DB_PATH = os.getenv("SQLITE_DB_PATH", "app_data/ai_memory.db")
-HF_TOKEN = os.getenv("HF_TOKEN")
-HF_MEMORY_DATASET_REPO = os.getenv("HF_MEMORY_DATASET_REPO", "broadfield-dev/ai-brain")
-HF_RULES_DATASET_REPO = os.getenv("HF_RULES_DATASET_REPO", "broadfield-dev/ai-rules")
-
-_embedder = None
-_dimension = 384
-_faiss_memory_index = None
-_memory_items_list = []
-_faiss_rules_index = None
-_rules_items_list = []
-
-_initialized = False
-_init_lock = threading.Lock()
-
-def _get_sqlite_connection():
-    if not sqlite3: raise ImportError("sqlite3 module is required for SQLite backend.")
-    db_dir = os.path.dirname(SQLITE_DB_PATH)
-    if db_dir: os.makedirs(db_dir, exist_ok=True)
-    return sqlite3.connect(SQLITE_DB_PATH, timeout=10)
-
-def _init_sqlite_tables():
-    if STORAGE_BACKEND != "SQLITE" or not sqlite3: return
-    try:
-        with _get_sqlite_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute("CREATE TABLE IF NOT EXISTS memories (id INTEGER PRIMARY KEY, memory_json TEXT NOT NULL, created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)")
-            cursor.execute("CREATE TABLE IF NOT EXISTS rules (id INTEGER PRIMARY KEY, rule_text TEXT NOT NULL UNIQUE, created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)")
-            conn.commit()
-        logger.info("SQLite tables checked/created.")
-    except Exception as e:
-        logger.error(f"SQLite table initialization error: {e}", exc_info=True)
-
-def _build_faiss_index(items_list, text_extraction_fn):
-    if not _embedder:
-        logger.error("Cannot build FAISS index: Embedder not available.")
-        return None, []
-
-    index = faiss.IndexFlatL2(_dimension)
-    if not items_list: return index, []
-
-    texts_to_embed, valid_items = [], []
-    for item in items_list:
-        try:
-            texts_to_embed.append(text_extraction_fn(item))
-            valid_items.append(item)
-        except (json.JSONDecodeError, TypeError) as e:
-            logger.warning(f"Skipping item during FAISS indexing due to processing error: {e}. Item: '{str(item)[:100]}...'")
-
-    if not texts_to_embed:
-        logger.warning("No valid items to embed for FAISS index after filtering.")
-        return index, []
-
-    try:
-        embeddings = _embedder.encode(texts_to_embed, convert_to_tensor=False, show_progress_bar=False)
-        embeddings_np = np.array(embeddings, dtype=np.float32)
-        if embeddings_np.ndim == 2 and embeddings_np.shape[0] == len(texts_to_embed):
-            index.add(embeddings_np)
-            logger.info(f"FAISS index built with {index.ntotal} / {len(items_list)} items.")
-            return index, valid_items
-        else:
-            logger.error(f"FAISS build failed: Embeddings shape error.")
-            return faiss.IndexFlatL2(_dimension), []
-    except Exception as e:
-        logger.error(f"Error building FAISS index: {e}", exc_info=True)
-        return faiss.IndexFlatL2(_dimension), []
-
-def initialize_memory_system():
-    global _initialized, _embedder, _dimension, _faiss_memory_index, _memory_items_list, _faiss_rules_index, _rules_items_list
-
-    with _init_lock:
-        if _initialized: return
-
-        logger.info(f"Initializing memory system with backend: {STORAGE_BACKEND}")
-        init_start_time = time.time()
-
-        if not all([SentenceTransformer, faiss, np]):
-            logger.error("Core RAG libraries not available. Cannot initialize semantic memory.")
-            return
-
-        if not _embedder:
-            try:
-                logger.info("Loading SentenceTransformer model...")
-                _embedder = SentenceTransformer('all-MiniLM-L6-v2', cache_folder="./sentence_transformer_cache")
-                _dimension = _embedder.get_sentence_embedding_dimension() or 384
-            except Exception as e:
-                logger.critical(f"FATAL: Could not load SentenceTransformer model. Semantic search disabled. Error: {e}", exc_info=True)
-                return
-
-        if STORAGE_BACKEND == "SQLITE": _init_sqlite_tables()
-
-        raw_mems = []
-        if STORAGE_BACKEND == "SQLITE":
-            try: raw_mems = [row[0] for row in _get_sqlite_connection().execute("SELECT memory_json FROM memories")]
-            except Exception as e: logger.error(f"Error loading memories from SQLite: {e}")
-        elif STORAGE_BACKEND == "HF_DATASET":
-            try:
-                dataset = load_dataset(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
-                if "train" in dataset and "memory_json" in dataset["train"].column_names:
-                    raw_mems = [m for m in dataset["train"]["memory_json"] if isinstance(m, str) and m.strip()]
-            except Exception as e: logger.error(f"Error loading memories from HF Dataset: {e}", exc_info=True)
-
-        mem_index, valid_mems = _build_faiss_index(raw_mems, lambda m: f"User: {json.loads(m).get('user_input', '')}\nAI: {json.loads(m).get('bot_response', '')}")
-        _faiss_memory_index = mem_index
-        _memory_items_list = valid_mems
-        logger.info(f"Loaded and indexed {len(_memory_items_list)} memories.")
-
-        raw_rules = []
-        if STORAGE_BACKEND == "SQLITE":
-            try: raw_rules = [row[0] for row in _get_sqlite_connection().execute("SELECT rule_text FROM rules")]
-            except Exception as e: logger.error(f"Error loading rules from SQLite: {e}")
-        elif STORAGE_BACKEND == "HF_DATASET":
-            try:
-                dataset = load_dataset(HF_RULES_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
-                if "train" in dataset and "rule_text" in dataset["train"].column_names:
-                    raw_rules = [r for r in dataset["train"]["rule_text"] if isinstance(r, str) and r.strip()]
-            except Exception as e: logger.error(f"Error loading rules from HF Dataset: {e}", exc_info=True)
-
-        rule_index, valid_rules = _build_faiss_index(sorted(list(set(raw_rules))), lambda r: r)
-        _faiss_rules_index = rule_index
-        _rules_items_list = valid_rules
-        logger.info(f"Loaded and indexed {len(_rules_items_list)} rules.")
-
-        if _embedder and _faiss_memory_index is not None and _faiss_rules_index is not None:
-            _initialized = True
-            logger.info(f"Memory system initialization complete in {time.time() - init_start_time:.2f}s")
-        else:
-            logger.error("Memory system initialization failed. Core components are not ready.")
-
-def _verify_and_rebuild_if_needed(index, items_list, text_extraction_fn):
-    global _memory_items_list, _rules_items_list
-    if not index or index.ntotal != len(items_list):
-        logger.warning(f"FAISS index mismatch detected (Index: {index.ntotal if index else 'None'}, List: {len(items_list)}). Rebuilding...")
-        new_index, valid_items = _build_faiss_index(items_list, text_extraction_fn)
-        if items_list is _memory_items_list:
-            _memory_items_list = valid_items
-        elif items_list is _rules_items_list:
-            _rules_items_list = valid_items
-        return new_index
-    return index
-
-def add_memory_entry(user_input: str, metrics: dict, bot_response: str) -> tuple[bool, str]:
-    global _memory_items_list, _faiss_memory_index
-    if not _initialized: initialize_memory_system()
-    if not _embedder or _faiss_memory_index is None: return False, "Memory system not ready."
-
-    memory_obj = {"user_input": user_input, "metrics": metrics, "bot_response": bot_response, "timestamp": datetime.utcnow().isoformat()}
-    memory_json_str = json.dumps(memory_obj)
-    text_to_embed = f"User: {user_input}\nAI: {bot_response}\nTakeaway: {metrics.get('takeaway', 'N/A')}"
-
-    try:
-        embedding = _embedder.encode([text_to_embed], convert_to_tensor=False)
-        _faiss_memory_index.add(np.array(embedding, dtype=np.float32))
-        _memory_items_list.append(memory_json_str)
-
-        if STORAGE_BACKEND == "SQLITE":
-            with _get_sqlite_connection() as conn: conn.execute("INSERT INTO memories (memory_json) VALUES (?)", (memory_json_str,)); conn.commit()
-        elif STORAGE_BACKEND == "HF_DATASET":
-            Dataset.from_dict({"memory_json": list(_memory_items_list)}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)
-
-        return True, "Memory added."
-    except Exception as e:
-        logger.error(f"Error adding memory entry: {e}", exc_info=True)
-        return False, f"Error: {e}"
-
-def retrieve_memories_semantic(query: str, k: int = 3) -> list[dict]:
-    global _faiss_memory_index
-    if not _initialized: initialize_memory_system()
-    if not _faiss_memory_index or _faiss_memory_index.ntotal == 0: return []
-
-    _faiss_memory_index = _verify_and_rebuild_if_needed(_faiss_memory_index, _memory_items_list, lambda m: f"User: {json.loads(m).get('user_input', '')}\nAI: {json.loads(m).get('bot_response', '')}")
-    if not _faiss_memory_index or _faiss_memory_index.ntotal == 0: return []
-
-    try:
-        query_embedding = _embedder.encode([query], convert_to_tensor=False)
-        distances, indices = _faiss_memory_index.search(np.array(query_embedding, dtype=np.float32), min(k, _faiss_memory_index.ntotal))
-        return [json.loads(_memory_items_list[i]) for i in indices[0] if 0 <= i < len(_memory_items_list)]
-    except Exception as e:
-        logger.error(f"Error retrieving memories: {e}", exc_info=True)
-        return []
-
-def add_rule_entry(rule_text: str) -> tuple[bool, str]:
-    global _rules_items_list, _faiss_rules_index
-    if not _initialized: initialize_memory_system()
-    if not _embedder or _faiss_rules_index is None: return False, "Rule system not ready."
-
-    rule_text = rule_text.strip()
-    if not rule_text or rule_text in _rules_items_list: return False, "duplicate or invalid"
-    if not re.match(r"\[(CORE_RULE|RESPONSE_PRINCIPLE|BEHAVIORAL_ADJUSTMENT|GENERAL_LEARNING)\|([\d\.]+?)\]", rule_text, re.I): return False, "Invalid format."

-    try:
-        embedding = _embedder.encode([rule_text], convert_to_tensor=False)
-        _faiss_rules_index.add(np.array(embedding, dtype=np.float32))
-        _rules_items_list.append(rule_text)
-        _rules_items_list.sort()
-
-        if STORAGE_BACKEND == "SQLITE":
-            with _get_sqlite_connection() as conn: conn.execute("INSERT OR IGNORE INTO rules (rule_text) VALUES (?)", (rule_text,)); conn.commit()
-        elif STORAGE_BACKEND == "HF_DATASET":
-            Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
-        return True, "Rule added."
-    except Exception as e:
-        logger.error(f"Error adding rule: {e}", exc_info=True)
-        return False, f"Error: {e}"
-
-def retrieve_rules_semantic(query: str, k: int = 5) -> list[str]:
-    global _faiss_rules_index
-    if not _initialized: initialize_memory_system()
-    if not _faiss_rules_index or _faiss_rules_index.ntotal == 0: return []
-
-    _faiss_rules_index = _verify_and_rebuild_if_needed(_faiss_rules_index, _rules_items_list, lambda r: r)
-    if not _faiss_rules_index or _faiss_rules_index.ntotal == 0: return []
-
-    try:
-        query_embedding = _embedder.encode([query], convert_to_tensor=False)
-        distances, indices = _faiss_rules_index.search(np.array(query_embedding, dtype=np.float32), min(k, _faiss_rules_index.ntotal))
-        return [_rules_items_list[i] for i in indices[0] if 0 <= i < len(_rules_items_list)]
-    except Exception as e:
-        logger.error(f"Error retrieving rules: {e}", exc_info=True)
-        return []
-
-def remove_rule_entry(rule_text_to_delete: str) -> bool:
-    global _rules_items_list, _faiss_rules_index
-    if not _initialized: initialize_memory_system()
-    rule_text_to_delete = rule_text_to_delete.strip()
-    if rule_text_to_delete not in _rules_items_list: return False
-    try:
-        new_list = [r for r in _rules_items_list if r != rule_text_to_delete]
-        new_index, valid_items = _build_faiss_index(new_list, lambda r: r)
-        _faiss_rules_index = new_index
-        _rules_items_list = valid_items
-
-        if STORAGE_BACKEND == "SQLITE":
-            with _get_sqlite_connection() as conn: conn.execute("DELETE FROM rules WHERE rule_text = ?", (rule_text_to_delete,)); conn.commit()
-        elif STORAGE_BACKEND == "HF_DATASET" and _rules_items_list:
-            Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
-        return True
-    except Exception as e:
-        logger.error(f"Error removing rule: {e}", exc_info=True)
-        return False
-
-def get_all_rules_cached() -> list[str]:
-    if not _initialized: initialize_memory_system()
-    return list(_rules_items_list)
-
-def get_all_memories_cached() -> list[dict]:
-    if not _initialized: initialize_memory_system()
-    valid_mems = []
-    for m_str in _memory_items_list:
-        try:
-            valid_mems.append(json.loads(m_str))
-        except json.JSONDecodeError:
-            continue
-    return valid_mems
-
-def clear_all_memory_data_backend() -> bool:
-    global _memory_items_list, _faiss_memory_index
-    if not _initialized: initialize_memory_system()
-    _memory_items_list.clear()
-    if _faiss_memory_index: _faiss_memory_index.reset()
-    try:
-        if STORAGE_BACKEND == "SQLITE":
-            with _get_sqlite_connection() as conn: conn.execute("DELETE FROM memories"); conn.commit()
-        elif STORAGE_BACKEND == "HF_DATASET":
-            Dataset.from_dict({"memory_json": []}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)
-        return True
-    except Exception as e:
-        logger.error(f"Error clearing memory data: {e}"); return False
-
-def clear_all_rules_data_backend() -> bool:
-    global _rules_items_list, _faiss_rules_index
-    if not _initialized: initialize_memory_system()
-    _rules_items_list.clear()
-    if _faiss_rules_index: _faiss_rules_index.reset()
+
+API_KEYS_ENV_VARS = {"GROQ": 'GROQ_API_KEY', "OPENROUTER": 'OPENROUTER_API_KEY', "TOGETHERAI": 'TOGETHERAI_API_KEY', "COHERE": 'COHERE_API_KEY', "XAI": 'XAI_API_KEY', "OPENAI": 'OPENAI_API_KEY', "GOOGLE": 'GOOGLE_API_KEY', "HUGGINGFACE": 'HF_TOKEN'}
+API_URLS = {"GROQ": 'https://api.groq.com/openai/v1/chat/completions', "OPENROUTER": 'https://openrouter.ai/api/v1/chat/completions', "TOGETHERAI": 'https://api.together.ai/v1/chat/completions', "COHERE": 'https://api.cohere.ai/v1/chat', "XAI": 'https://api.x.ai/v1/chat/completions', "OPENAI": 'https://api.openai.com/v1/chat/completions', "GOOGLE": 'https://generativelanguage.googleapis.com/v1beta/models/', "HUGGINGFACE": 'https://api-inference.huggingface.co/models/'}
+
+try:
+    with open("models.json", "r") as f: MODELS_BY_PROVIDER = json.load(f)
+except (FileNotFoundError, json.JSONDecodeError):
+    logger.warning("models.json not found or invalid. Using a fallback model list.")
+    MODELS_BY_PROVIDER = {"groq": {"default": "llama3-8b-8192", "models": {"Llama 3 8B (Groq)": "llama3-8b-8192"}}}
+
+def _get_api_key(provider: str, ui_api_key_override: str = None) -> str | None:
+    provider_upper = provider.upper()
+    if ui_api_key_override and ui_api_key_override.strip(): return ui_api_key_override.strip()
+    env_var_name = API_KEYS_ENV_VARS.get(provider_upper)
+    if env_var_name:
+        env_key = os.getenv(env_var_name)
+        if env_key and env_key.strip(): return env_key.strip()
+    return None
+
+def get_available_providers() -> list[str]:
+    return sorted(list(MODELS_BY_PROVIDER.keys()))
+
+def get_model_display_names_for_provider(provider: str) -> list[str]:
+    return sorted(list(MODELS_BY_PROVIDER.get(provider.lower(), {}).get("models", {}).keys()))
+
+def get_default_model_display_name_for_provider(provider: str) -> str | None:
+    provider_data = MODELS_BY_PROVIDER.get(provider.lower(), {})
+    models_dict = provider_data.get("models", {})
+    default_model_id = provider_data.get("default")
+    if default_model_id and models_dict:
+        for display_name, model_id_val in models_dict.items():
+            if model_id_val == default_model_id: return display_name
+    if models_dict: return sorted(list(models_dict.keys()))[0]
+    return None
+
+def get_model_id_from_display_name(provider: str, display_name: str) -> str | None:
+    return MODELS_BY_PROVIDER.get(provider.lower(), {}).get("models", {}).get(display_name)
+
+def call_model_stream(provider: str, model_display_name: str, messages: list[dict], api_key_override: str = None, temperature: float = 0.7, max_tokens: int = None) -> iter:
+    provider_lower = provider.lower()
+    api_key = _get_api_key(provider_lower, api_key_override)
+    base_url = API_URLS.get(provider.upper())
+    model_id = get_model_id_from_display_name(provider_lower, model_display_name)
+    if not all([api_key, base_url, model_id]):
+        yield f"Error: Configuration missing for {provider}/{model_display_name}."
+        return
+
+    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+    payload = {"model": model_id, "messages": messages, "stream": True, "temperature": temperature}
+    if max_tokens: payload["max_tokens"] = max_tokens
+    if provider_lower == "openrouter": headers["HTTP-Referer"] = os.getenv("OPENROUTER_REFERRER", "http://localhost")
+
     try:
-        if STORAGE_BACKEND == "SQLITE":
-            with _get_sqlite_connection() as conn: conn.execute("DELETE FROM rules"); conn.commit()
-        elif STORAGE_BACKEND == "HF_DATASET":
-            Dataset.from_dict({"rule_text": []}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
-        return True
+        response = requests.post(base_url, headers=headers, json=payload, stream=True, timeout=180)
+        response.raise_for_status()
+        buffer = ""
+        for chunk in response.iter_content(chunk_size=None):
+            buffer += chunk.decode('utf-8', errors='replace')
+            while '\n\n' in buffer:
+                event_str, buffer = buffer.split('\n\n', 1)
+                if not event_str.strip() or not event_str.startswith('data: '): continue
+                data_json = event_str[len('data: '):].strip()
+                if data_json == '[DONE]': return
+                try:
+                    data = json.loads(data_json)
+                    if data.get("choices") and len(data["choices"]) > 0:
+                        delta = data["choices"][0].get("delta", {})
+                        if delta and delta.get("content"): yield delta["content"]
+                except json.JSONDecodeError: continue
+    except requests.exceptions.HTTPError as e:
+        yield f"Error: API HTTP Error ({e.response.status_code}): {e.response.text[:200]}"
     except Exception as e:
-        logger.error(f"Error clearing rules data: {e}"); return False
-
-def save_faiss_indices_to_disk():
-    if not _initialized or not faiss: return
-    faiss_dir = "app_data/faiss_indices"
-    os.makedirs(faiss_dir, exist_ok=True)
-    if _faiss_memory_index: faiss.write_index(_faiss_memory_index, os.path.join(faiss_dir, "memory_index.faiss"))
-    if _faiss_rules_index: faiss.write_index(_faiss_rules_index, os.path.join(faiss_dir, "rules_index.faiss"))
+        yield f"Error: An unexpected error occurred: {e}"