"""Logging utilities for agent workflows: console output plus a size-rotated
log file, with automatic cleanup of old log files on startup."""

import glob
import logging
import logging.handlers
from datetime import datetime, timedelta
from pathlib import Path


class AgentLogger:
    """Wraps a named logger with console output, size-based file rotation,
    and age-based cleanup of old log files."""

    def __init__(self, log_dir="logs", max_bytes=10 * 1024 * 1024, backup_count=5, cleanup_days=7):
        self.log_dir = Path(log_dir)
        self.log_dir.mkdir(exist_ok=True)
        self.max_bytes = max_bytes
        self.backup_count = backup_count
        self.cleanup_days = cleanup_days

        self.logger = logging.getLogger("agent_logger")
        self.logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")

        # Console handler: INFO and above
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO)
        console_handler.setFormatter(formatter)
        self.logger.addHandler(console_handler)

        # Rotating file handler: file named after the startup date, rotated by size
        log_file = self.log_dir / f"agents_{datetime.now().strftime('%Y%m%d')}.log"
        file_handler = logging.handlers.RotatingFileHandler(
            log_file,
            maxBytes=max_bytes,
            backupCount=backup_count,
            encoding='utf-8'
        )
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(formatter)
        self.logger.addHandler(file_handler)

        # Clean up old log files on startup
        self.cleanup_old_logs()

    def log_workflow_step(self, agent_name, message):
        """Record a high-level workflow step for an agent."""
        self.logger.info(f"{agent_name}: {message}")

    def log_agent_output(self, agent_name, output, method, duration):
        """Record an agent's output, the method that produced it, and the elapsed time."""
        self.logger.debug(f"{agent_name} {method} output: {output} ({duration}s)")

    def log_inter_agent_pass(self, from_agent, to_agent, data_size):
        """Record a hand-off of data from one agent to another."""
        self.logger.info(f"πŸ”— PASS: {from_agent} β†’ {to_agent} | Size: {data_size}")

    def cleanup_old_logs(self):
        """Clean up log files older than cleanup_days."""
        try:
            cutoff_date = datetime.now() - timedelta(days=self.cleanup_days)
            log_pattern = str(self.log_dir / "agents_*.log*")
            deleted_count = 0
            for log_file_path in glob.glob(log_pattern):
                log_file = Path(log_file_path)
                try:
                    # Get file modification time
                    file_mtime = datetime.fromtimestamp(log_file.stat().st_mtime)
                    if file_mtime < cutoff_date:
                        log_file.unlink()
                        deleted_count += 1
                        print(f"Deleted old log file: {log_file.name}")
                except Exception as e:
                    print(f"Error deleting log file {log_file}: {e}")
            if deleted_count > 0:
                print(f"Cleaned up {deleted_count} old log files")
        except Exception as e:
            print(f"Error during log cleanup: {e}")

    def get_log_stats(self):
        """Get statistics about log files."""
        try:
            log_pattern = str(self.log_dir / "agents_*.log*")
            log_files = list(glob.glob(log_pattern))
            total_size = 0
            file_info = []
            for log_file_path in log_files:
                log_file = Path(log_file_path)
                try:
                    size = log_file.stat().st_size
                    mtime = datetime.fromtimestamp(log_file.stat().st_mtime)
                    total_size += size
                    file_info.append({
                        'name': log_file.name,
                        'size_mb': round(size / (1024 * 1024), 2),
                        'modified': mtime.strftime('%Y-%m-%d %H:%M:%S')
                    })
                except Exception as e:
                    print(f"Error reading log file {log_file}: {e}")
            return {
                'total_files': len(log_files),
                'total_size_mb': round(total_size / (1024 * 1024), 2),
                'files': file_info
            }
        except Exception as e:
            print(f"Error getting log stats: {e}")
            return {'error': str(e)}


# Create global logger with configuration
agent_logger = AgentLogger(
    log_dir="logs",
    max_bytes=10 * 1024 * 1024,  # 10 MB per file
    backup_count=5,              # Keep 5 backup files
    cleanup_days=7               # Delete files older than 7 days
)
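

# A minimal usage sketch. The agent names, payload size, and steps below are
# illustrative assumptions, not part of the real workflow that imports this module.
if __name__ == "__main__":
    agent_logger.log_workflow_step("ResearchAgent", "Starting document analysis")
    agent_logger.log_agent_output("ResearchAgent", "summary text...", "summarize", 1.42)
    agent_logger.log_inter_agent_pass("ResearchAgent", "WriterAgent", "2.1 KB")

    stats = agent_logger.get_log_stats()
    print(f"{stats.get('total_files', 0)} log file(s), {stats.get('total_size_mb', 0)} MB total")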