import json
import logging
import os
from datetime import datetime
from logging.handlers import TimedRotatingFileHandler


class GetLogger:

    def __init__(self, logging_level="INFO", log_to_console=True, log_dir="logs"):
        """
        Advanced logger.

        - Logs to both a rotating file and the console
        - Default rotation: daily, keeping the last 3 rotated files
        - Safe filename (no ':' in the timestamp)
        """
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging_level.upper())

        # Clear handlers left over from a previous instance so records
        # are not emitted twice after re-initialization.
        if self.logger.hasHandlers():
            self.logger.handlers.clear()

        os.makedirs(log_dir, exist_ok=True)

        # Use '-' instead of ':' in the timestamp so the filename is
        # valid on every platform.
        file_name = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + ".log"
        log_path = os.path.join(log_dir, file_name)
        file_handler = TimedRotatingFileHandler(
            filename=log_path, when="D", interval=1, backupCount=3, encoding="utf-8"
        )

        formatter = logging.Formatter(
            "%(asctime)s [%(levelname)s] [%(filename)s:%(lineno)d] %(funcName)s() - %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
        )
        file_handler.setFormatter(formatter)
        self.logger.addHandler(file_handler)

        if log_to_console:
            console_handler = logging.StreamHandler()
            console_handler.setFormatter(formatter)
            self.logger.addHandler(console_handler)

    def get_logger(self):
        return self.logger

    def delete_logger(self):
        """Close and detach all handlers, then drop the logger reference."""
        handlers = self.logger.handlers[:]
        for handler in handlers:
            self.logger.removeHandler(handler)
            handler.close()
        # Note: this removes only the instance attribute; the logger
        # object itself stays registered with the logging module.
        del self.logger


class MetricsLogger:
    """
    Collects evaluation metrics and saves aggregated statistics.
    """

    def __init__(self, save_path="logs/metrics_summary.json", logger=None):
        self.save_path = save_path
        self.metrics = []
        self.logger = logger or logging.getLogger(__name__)

    def log_query_metrics(self, query, result_dict):
        """
        Log metrics for a single query.

        Example: result_dict = {"latency_sec": 0.5, "rougeL": 0.7, ...}
        """
        record = {"query": query}
        record.update(result_dict)
        self.metrics.append(record)
        self.logger.info(f"Metrics logged for query: {query[:50]}...")

    def summarize(self):
        """Aggregate metrics (mean values)."""
        if not self.metrics:
            return {}

        summary = {}
        # Keys are taken from the first record; metrics that appear only
        # in later records are skipped.
        keys = [k for k in self.metrics[0].keys() if k != "query"]
        for key in keys:
            values = [
                m[key]
                for m in self.metrics
                if key in m and isinstance(m[key], (int, float))
            ]
            if values:
                summary[f"avg_{key}"] = float(sum(values) / len(values))

        return summary

    def save(self):
        """Save all metrics + summary to JSON."""
        # Fall back to the current directory when save_path has no
        # directory component, since makedirs("") would raise.
        os.makedirs(os.path.dirname(self.save_path) or ".", exist_ok=True)
        data = {
            "per_query": self.metrics,
            "summary": self.summarize(),
        }
        with open(self.save_path, "w", encoding="utf-8") as f:
            json.dump(data, f, indent=2)
        self.logger.info(f"Metrics saved to {self.save_path}")
        return data
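
# Illustrative shape of the saved JSON for two logged queries (the
# values are made up and rounded for readability, not real results):
#
# {
#   "per_query": [
#     {"query": "...", "latency_sec": 0.5, "rougeL": 0.7},
#     {"query": "...", "latency_sec": 0.3, "rougeL": 0.6}
#   ],
#   "summary": {"avg_latency_sec": 0.4, "avg_rougeL": 0.65}
# }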
if __name__ == "__main__":
    obj = GetLogger()
    logger = obj.get_logger()
    logger.info("Logger initialized successfully")
    logger.warning("This is a warning")
    logger.error("This is an error")
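
    # A minimal usage sketch for MetricsLogger; the queries and metric
    # values below are illustrative placeholders, not real measurements.
    metrics = MetricsLogger(logger=logger)
    metrics.log_query_metrics("example query one", {"latency_sec": 0.5, "rougeL": 0.7})
    metrics.log_query_metrics("example query two", {"latency_sec": 0.3, "rougeL": 0.6})
    metrics.save()  # writes per-query records plus avg_* summary values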