Update utils/storage.py

utils/storage.py (CHANGED): +325 -165

@@ -1,239 +1,399 @@
import json
import os
from pathlib import Path
-from typing import Dict, List,

-from utils.config import FILE_PATHS, DATA_DIR, EXPORT_DIR
from utils.logging import get_logger, log_performance
-from utils.error_handling import handle_data_exceptions, DataError, ValidationError

-# Initialize logger
logger = get_logger(__name__)

    """
-    Load data from a

    Args:
        default: Default value to return if file doesn't exist
    Returns:
    Raises:
    """

    if not file_path.exists():
-        logger.
-        return default

    try:
-        logger.debug(f"Loading data from {file_path}")
        return data
    except json.JSONDecodeError as e:
    except Exception as e:
-        raise DataError(f"Error loading data from {file_path}", {"original_error": str(e)}) from e

-@handle_data_exceptions
    """
-    Save data to a

    Args:
-        file_path: Path to the JSON file
        data: Data to save
    Returns:
        True if successful, False otherwise
    Raises:
    """

    try:
-        logger.debug(f"Saving data to {file_path}")
        return True
    except Exception as e:

-@handle_data_exceptions
-def
    """
    Args:
    Returns:
-    Raises:
-        ValidationError: If the data format is invalid
-        DataError: If there's an error exporting the data
    """

-    # Validate format type
-    if format_type not in ['json', 'csv', 'markdown']:
-        logger.error(f"Unsupported format type: {format_type}")
-        raise ValidationError(f"Unsupported format type: {format_type}")

-    if format_type == 'json':
-        result = json.dumps(data, indent=2, ensure_ascii=False)
-    elif format_type == 'csv':
-        # Simple CSV conversion for list of dictionaries
-        if not isinstance(data, list) or not all(isinstance(item, dict) for item in data):
-            logger.error("CSV export only supports list of dictionaries")
-            raise ValidationError("CSV export only supports list of dictionaries")

-        if not data:
-            return ""

-        headers = list(data[0].keys())
-        result = ",".join(headers) + "\n"

-        for item in data:
-            row = ",".join([str(item.get(header, "")).replace(",", ";") for header in headers])
-            result += row + "\n"
-    elif format_type == 'markdown':
-        # Simple Markdown conversion for list of dictionaries
-        if not isinstance(data, list) or not all(isinstance(item, dict) for item in data):
-            logger.error("Markdown export only supports list of dictionaries")
-            raise ValidationError("Markdown export only supports list of dictionaries")

-        if not data:
-            return ""

-        headers = list(data[0].keys())
-        result = "| " + " | ".join(headers) + " |\n"
-        result += "| " + " | ".join(["---" for _ in headers]) + " |\n"

-        for item in data:
-            row = "| " + " | ".join([str(item.get(header, "")).replace("|", "\|") for header in headers]) + " |"
-            result += row + "\n"
-    except Exception as e:
-        logger.error(f"Error formatting data for export: {str(e)}")
-        raise DataError(f"Error formatting data for export", {"original_error": str(e)}) from e

-    if
-        return

-        with open(file_path, 'w', encoding='utf-8') as f:
-            f.write(result)
-        logger.debug(f"Successfully exported data to {file_path}")
-        return True
-    except Exception as e:
-        logger.error(f"Error writing exported data to {file_path}: {str(e)}")
-        raise DataError(f"Error exporting data to {file_path}", {"original_error": str(e)}) from e

-@handle_data_exceptions
-def
    """
    Args:
    Returns:
-    Raises:
-        ValidationError: If the data type is invalid
    """
-    if
-        raise ValidationError(f"Invalid data type: {data_type}")

-@handle_data_exceptions
-def
    """
    Args:
    Returns:
    """
-    backup_name = f"mona_backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}"

    try:
    except Exception as e:
-        logger.error(f"Error

    """
    Args:
    Returns:
        True if successful, False otherwise
    """
+"""
+Storage utilities for the application.
+Handles data persistence, loading, and saving operations.
+"""
+
import json
+import pickle
import os
+import shutil
from pathlib import Path
+from typing import Any, Dict, List, Optional, Union
+from datetime import datetime
+import tempfile

from utils.logging import get_logger, log_performance
+from utils.error_handling import handle_data_exceptions, DataError, ValidationError, StorageError

logger = get_logger(__name__)

+# Default storage directory
+DEFAULT_STORAGE_DIR = Path("data")
+DEFAULT_STORAGE_DIR.mkdir(exist_ok=True)
+
+@handle_data_exceptions(default_return=None)
+@log_performance
+def load_data(
+    filename: str,
+    storage_dir: Union[str, Path] = None,
+    format: str = "auto",
+    encoding: str = "utf-8",
+    default: Any = None
+) -> Any:
    """
+    Load data from a file.

    Args:
+        filename: Name of the file to load
+        storage_dir: Directory containing the file (defaults to DEFAULT_STORAGE_DIR)
+        format: File format ('json', 'pickle', 'txt', or 'auto' to detect from extension)
+        encoding: Text encoding for text files
        default: Default value to return if file doesn't exist
+
    Returns:
+        Loaded data or default value
+
    Raises:
+        StorageError: If file cannot be loaded
    """
+    if storage_dir is None:
+        storage_dir = DEFAULT_STORAGE_DIR
+
+    file_path = Path(storage_dir) / filename

+    # Return default if file doesn't exist
    if not file_path.exists():
+        logger.info(f"File {file_path} does not exist, returning default value")
+        return default
+
+    # Auto-detect format from extension
+    if format == "auto":
+        ext = file_path.suffix.lower()
+        if ext == ".json":
+            format = "json"
+        elif ext in [".pkl", ".pickle"]:
+            format = "pickle"
+        else:
+            format = "txt"

    try:
+        logger.debug(f"Loading data from {file_path} with format {format}")
+
+        if format == "json":
+            with open(file_path, 'r', encoding=encoding) as f:
+                data = json.load(f)
+        elif format == "pickle":
+            with open(file_path, 'rb') as f:
+                data = pickle.load(f)
+        elif format == "txt":
+            with open(file_path, 'r', encoding=encoding) as f:
+                data = f.read()
+        else:
+            raise StorageError(f"Unsupported format: {format}", operation="load")
+
+        logger.info(f"Successfully loaded data from {file_path}")
        return data
+
    except json.JSONDecodeError as e:
+        raise StorageError(f"Invalid JSON in file {file_path}: {e}", operation="load")
+    except pickle.UnpicklingError as e:
+        raise StorageError(f"Invalid pickle data in file {file_path}: {e}", operation="load")
+    except UnicodeDecodeError as e:
+        raise StorageError(f"Encoding error reading file {file_path}: {e}", operation="load")
    except Exception as e:
+        raise StorageError(f"Error loading file {file_path}: {e}", operation="load")
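A minimal usage sketch of the format auto-detection (illustrative, not part of the diff; filenames are hypothetical):

    # "auto" maps .json -> json, .pkl/.pickle -> pickle, anything else -> txt
    users = load_data("users.json", default=[])   # parsed JSON, or [] if the file is missing
    state = load_data("state.pkl")                # unpickled object, or None if missing
    notes = load_data("notes.txt", default="")    # raw text contents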
+@handle_data_exceptions(default_return=False, re_raise=True)
+@log_performance
+def save_data(
+    data: Any,
+    filename: str,
+    storage_dir: Union[str, Path] = None,
+    format: str = "auto",
+    encoding: str = "utf-8",
+    backup: bool = True,
+    atomic: bool = True
+) -> bool:
    """
+    Save data to a file.

    Args:
        data: Data to save
+        filename: Name of the file to save to
+        storage_dir: Directory to save the file in (defaults to DEFAULT_STORAGE_DIR)
+        format: File format ('json', 'pickle', 'txt', or 'auto' to detect from extension)
+        encoding: Text encoding for text files
+        backup: Whether to create a backup of existing file
+        atomic: Whether to use atomic write (write to temp file then move)
+
    Returns:
        True if successful, False otherwise
+
    Raises:
+        StorageError: If file cannot be saved
    """
+    if storage_dir is None:
+        storage_dir = DEFAULT_STORAGE_DIR

+    storage_path = Path(storage_dir)
+    storage_path.mkdir(parents=True, exist_ok=True)
+
+    file_path = storage_path / filename
+
+    # Auto-detect format from extension
+    if format == "auto":
+        ext = file_path.suffix.lower()
+        if ext == ".json":
+            format = "json"
+        elif ext in [".pkl", ".pickle"]:
+            format = "pickle"
+        else:
+            format = "txt"
+
+    # Create backup if requested and file exists
+    if backup and file_path.exists():
+        backup_path = file_path.with_suffix(f"{file_path.suffix}.backup")
+        try:
+            shutil.copy2(file_path, backup_path)
+            logger.debug(f"Created backup at {backup_path}")
+        except Exception as e:
+            logger.warning(f"Could not create backup: {e}")

    try:
+        logger.debug(f"Saving data to {file_path} with format {format}")
+
+        if atomic:
+            # Use atomic write: write to temp file, then move
+            with tempfile.NamedTemporaryFile(mode='w' if format != 'pickle' else 'wb',
+                                             dir=storage_path,
+                                             delete=False,
+                                             suffix=f'.tmp_{filename}') as temp_file:
+                temp_path = Path(temp_file.name)
+
+                if format == "json":
+                    json.dump(data, temp_file, indent=2, ensure_ascii=False)
+                elif format == "pickle":
+                    pickle.dump(data, temp_file)
+                elif format == "txt":
+                    if isinstance(data, str):
+                        temp_file.write(data)
+                    else:
+                        temp_file.write(str(data))
+                else:
+                    raise StorageError(f"Unsupported format: {format}", operation="save")
+
+            # Atomic move
+            shutil.move(str(temp_path), str(file_path))
+        else:
+            # Direct write
+            if format == "json":
+                with open(file_path, 'w', encoding=encoding) as f:
+                    json.dump(data, f, indent=2, ensure_ascii=False)
+            elif format == "pickle":
+                with open(file_path, 'wb') as f:
+                    pickle.dump(data, f)
+            elif format == "txt":
+                with open(file_path, 'w', encoding=encoding) as f:
+                    if isinstance(data, str):
+                        f.write(data)
+                    else:
+                        f.write(str(data))
+            else:
+                raise StorageError(f"Unsupported format: {format}", operation="save")
+
+        logger.info(f"Successfully saved data to {file_path}")
        return True
+
    except Exception as e:
+        # Clean up temp file if atomic write failed
+        if atomic and 'temp_path' in locals() and temp_path.exists():
+            try:
+                temp_path.unlink()
+            except:
+                pass
+        raise StorageError(f"Error saving file {file_path}: {e}", operation="save")
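A round-trip sketch of the new save/load pair, assuming the default data/ directory (filenames and payloads are illustrative):

    # Atomic JSON write: data goes to a temp file in the same directory, then is moved into place;
    # backup=True first copies an existing settings.json to settings.json.backup
    save_data({"theme": "dark", "retries": 3}, "settings.json")
    settings = load_data("settings.json", default={})

    # Pickle round trip for objects JSON cannot represent
    save_data({("a", 1): {2, 3}}, "objects.pkl")
    objects = load_data("objects.pkl")

Writing the temp file into the destination directory rather than the system temp dir keeps the final move on one filesystem, which is what makes the rename effectively atomic on POSIX.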
+@handle_data_exceptions(default_return=[])
+def list_files(
+    storage_dir: Union[str, Path] = None,
+    pattern: str = "*",
+    extension: str = None
+) -> List[str]:
    """
+    List files in storage directory.

    Args:
+        storage_dir: Directory to list files from
+        pattern: Glob pattern to match files
+        extension: File extension to filter by (e.g., '.json')
+
    Returns:
+        List of filenames
    """
+    if storage_dir is None:
+        storage_dir = DEFAULT_STORAGE_DIR

+    storage_path = Path(storage_dir)

+    if not storage_path.exists():
+        return []

+    if extension:
+        pattern = f"*{extension}"

+    files = [f.name for f in storage_path.glob(pattern) if f.is_file()]
+    return sorted(files)
+@handle_data_exceptions(default_return=False)
+def delete_file(filename: str, storage_dir: Union[str, Path] = None) -> bool:
    """
+    Delete a file from storage.

    Args:
+        filename: Name of the file to delete
+        storage_dir: Directory containing the file
+
    Returns:
+        True if successful, False otherwise
    """
+    if storage_dir is None:
+        storage_dir = DEFAULT_STORAGE_DIR

+    file_path = Path(storage_dir) / filename
+
+    try:
+        if file_path.exists():
+            file_path.unlink()
+            logger.info(f"Deleted file {file_path}")
+            return True
+        else:
+            logger.warning(f"File {file_path} does not exist")
+            return False
+    except Exception as e:
+        logger.error(f"Error deleting file {file_path}: {e}")
+        return False
+@handle_data_exceptions(default_return={})
+def get_file_info(filename: str, storage_dir: Union[str, Path] = None) -> Dict[str, Any]:
    """
+    Get information about a file.

    Args:
+        filename: Name of the file
+        storage_dir: Directory containing the file
+
    Returns:
+        Dictionary with file information
    """
+    if storage_dir is None:
+        storage_dir = DEFAULT_STORAGE_DIR

+    file_path = Path(storage_dir) / filename

+    if not file_path.exists():
+        return {}

    try:
+        stat = file_path.stat()
+        return {
+            "name": filename,
+            "path": str(file_path),
+            "size": stat.st_size,
+            "created": datetime.fromtimestamp(stat.st_ctime).isoformat(),
+            "modified": datetime.fromtimestamp(stat.st_mtime).isoformat(),
+            "extension": file_path.suffix,
+            "exists": True
+        }
    except Exception as e:
+        logger.error(f"Error getting file info for {file_path}: {e}")
+        return {"name": filename, "exists": False, "error": str(e)}
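A short sketch tying the file-management helpers together (names are illustrative):

    for name in list_files(extension=".json"):    # e.g. ["cache.json", "settings.json"]
        info = get_file_info(name)
        print(name, info.get("size"), info.get("modified"))

    delete_file("stale.json")                     # False (plus a warning log) if the file is absent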
+# Configuration management
+@handle_data_exceptions(default_return={})
+def load_config(config_file: str = "config.json", storage_dir: Union[str, Path] = None) -> Dict[str, Any]:
    """
+    Load configuration from a JSON file.

    Args:
+        config_file: Name of the configuration file
+        storage_dir: Directory containing the config file
+
+    Returns:
+        Configuration dictionary
+    """
+    config = load_data(config_file, storage_dir, format="json", default={})
+    if not isinstance(config, dict):
+        logger.warning(f"Config file {config_file} did not contain a dictionary, using empty config")
+        return {}
+    return config
+
+@handle_data_exceptions(default_return=False)
+def save_config(config: Dict[str, Any], config_file: str = "config.json", storage_dir: Union[str, Path] = None) -> bool:
+    """
+    Save configuration to a JSON file.
+
+    Args:
+        config: Configuration dictionary to save
+        config_file: Name of the configuration file
+        storage_dir: Directory to save the config file in
+
    Returns:
        True if successful, False otherwise
    """
+    if not isinstance(config, dict):
+        raise ValidationError("Config must be a dictionary")

+    return save_data(config, config_file, storage_dir, format="json")
+
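The config helpers are thin wrappers over load_data/save_data with format="json"; a minimal sketch (the key is illustrative):

    config = load_config()             # {} if config.json is missing or not a dict
    config["log_level"] = "DEBUG"
    save_config(config)                # non-dict input raises ValidationError
                                       # (the decorator may turn this into a False return)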
+# Cache management
+class SimpleCache:
+    """Simple in-memory cache with file backing."""

+    def __init__(self, cache_file: str = "cache.json", storage_dir: Union[str, Path] = None):
+        self.cache_file = cache_file
+        self.storage_dir = storage_dir
+        self._cache = {}
+        self.load_cache()

+    def load_cache(self):
+        """Load cache from file."""
+        try:
+            self._cache = load_data(self.cache_file, self.storage_dir, default={})
+            if not isinstance(self._cache, dict):
+                self._cache = {}
+        except Exception as e:
+            logger.warning(f"Could not load cache: {e}")
+            self._cache = {}
+
+    def save_cache(self):
+        """Save cache to file."""
+        try:
+            save_data(self._cache, self.cache_file, self.storage_dir)
+        except Exception as e:
+            logger.warning(f"Could not save cache: {e}")
+
+    def get(self, key: str, default: Any = None) -> Any:
+        """Get value from cache."""
+        return self._cache.get(key, default)
+
+    def set(self, key: str, value: Any, save: bool = True) -> None:
+        """Set value in cache."""
+        self._cache[key] = value
+        if save:
+            self.save_cache()
+
+    def delete(self, key: str, save: bool = True) -> bool:
+        """Delete key from cache."""
+        if key in self._cache:
+            del self._cache[key]
+            if save:
+                self.save_cache()
+            return True
+        return False
+
+    def clear(self, save: bool = True) -> None:
+        """Clear all cache."""
+        self._cache.clear()
+        if save:
+            self.save_cache()
+
+    def keys(self) -> List[str]:
+        """Get all cache keys."""
+        return list(self._cache.keys())
+
+# Global cache instance
+default_cache = SimpleCache()
+
+def get_cache() -> SimpleCache:
+    """Get the default cache instance."""
+    return default_cache
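A usage sketch for SimpleCache, assuming the default cache.json backing file (key and value are illustrative):

    cache = get_cache()                            # module-level instance created at import time
    cache.set("last_sync", "2024-01-01T00:00:00")  # persists immediately (save=True by default)
    print(cache.get("last_sync"))
    cache.delete("last_sync")
    cache.clear(save=False)                        # in-memory only; call cache.save_cache() to persist

Since every set/delete with save=True rewrites the whole backing file, batching updates with save=False and a single save_cache() call is cheaper when writing many keys.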