import logging
import os
import sys
from logging.handlers import TimedRotatingFileHandler
from pathlib import Path

import structlog
from dotenv import load_dotenv
from structlog.processors import CallsiteParameter
from structlog.stdlib import BoundLogger
from structlog.typing import EventDict, Processor

# Load environment variables
load_dotenv()


class Logger:
"""
Configure and setup logging with Structlog.
Args:
json_logs (bool, optional): Whether to log in JSON format. Defaults to False.
log_level (str, optional): Minimum log level to display. Defaults to "INFO".
"""

    def __init__(self, json_logs: bool = False, log_level: str = "INFO"):
        self.json_logs = json_logs
        self.log_level = log_level.upper()
        self.environment = os.getenv("ENVIRONMENT", "PROD").upper()

        # Skip file logging in production / Hugging Face environments
        if self.environment in ["PROD", "HUGGINGFACE"]:
            self.log_file_path = None
        else:
            self.log_file_path = os.getenv(
                "LOG_FILE_PATH", self._get_default_log_file_path()
            )

    def _get_default_log_file_path(self) -> str | None:
        """Get the default log file path, or None if file logging is disabled."""
        if self.environment == "DEV":
            # For development, use a local logs directory
            log_dir = Path("logs")
            log_dir.mkdir(parents=True, exist_ok=True)
            return str(log_dir / "app.log")
        return None

    def _rename_event_key(self, _, __, event_dict: EventDict) -> EventDict:
        """
        Renames the 'event' key to 'message' in log entries.
        """
        event_dict["message"] = event_dict.pop("event", "")
        return event_dict

    def _drop_color_message_key(self, _, __, event_dict: EventDict) -> EventDict:
        """
        Removes the 'color_message' key from log entries.
        """
        event_dict.pop("color_message", None)
        return event_dict

    def _get_processors(self) -> list[Processor]:
        """
        Returns a list of structlog processors based on the specified configuration.
        """
        processors: list[Processor] = [
            structlog.contextvars.merge_contextvars,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.stdlib.ExtraAdder(),
            self._drop_color_message_key,
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.CallsiteParameterAdder(
                [
                    CallsiteParameter.FILENAME,
                    CallsiteParameter.FUNC_NAME,
                    CallsiteParameter.LINENO,
                ],
            ),
        ]

        if self.json_logs:
            # For JSON output, rename 'event' to 'message' and render exception
            # info into the event dict; the ConsoleRenderer used for non-JSON
            # output pretty-prints exceptions on its own.
            processors.append(self._rename_event_key)
            processors.append(structlog.processors.format_exc_info)

        return processors

    def _clear_uvicorn_loggers(self):
        """
        Clears the handlers of the uvicorn loggers so their records propagate
        to the root logger configured below.
        """
        for _log in ["uvicorn", "uvicorn.error", "uvicorn.access"]:
            logging.getLogger(_log).handlers.clear()
            logging.getLogger(_log).propagate = True

    def _configure_structlog(self, processors: list[Processor]):
        """
        Configures structlog with the specified processors.
        """
        structlog.configure(
            processors=processors
            + [
                # Hand the event dict over to the stdlib ProcessorFormatter
                # set up in _configure_logging.
                structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
            ],
            logger_factory=structlog.stdlib.LoggerFactory(),
            cache_logger_on_first_use=True,
        )

    def _configure_logging(self, processors: list[Processor]) -> logging.Logger:
        """Configures stdlib logging with the specified processors."""
        formatter = structlog.stdlib.ProcessorFormatter(
            foreign_pre_chain=processors,
            processors=[
                structlog.stdlib.ProcessorFormatter.remove_processors_meta,
                structlog.processors.JSONRenderer()
                if self.json_logs
                else structlog.dev.ConsoleRenderer(colors=True),
            ],
        )

        root_logger = logging.getLogger()
        root_logger.handlers.clear()  # Clear existing handlers

        # Always add console logging
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(formatter)
        root_logger.addHandler(stream_handler)

        # Only add file logging if in development and log_file_path is set
        if self.environment == "DEV" and self.log_file_path:
            try:
                file_handler = TimedRotatingFileHandler(
                    filename=self.log_file_path,
                    when="midnight",
                    interval=1,
                    backupCount=7,
                    encoding="utf-8",
                )
                file_handler.setFormatter(formatter)
                root_logger.addHandler(file_handler)
            except PermissionError:
                # If file logging fails, continue with console logging only
                pass

        root_logger.setLevel(self.log_level)
        return root_logger

    def _configure(self):
        """
        Configures logging and structlog, and sets up exception handling.
        """
        shared_processors: list[Processor] = self._get_processors()
        self._configure_structlog(shared_processors)
        root_logger = self._configure_logging(shared_processors)
        self._clear_uvicorn_loggers()

        def handle_exception(exc_type, exc_value, exc_traceback):
            """
            Logs uncaught exceptions.
            """
            if issubclass(exc_type, KeyboardInterrupt):
                sys.__excepthook__(exc_type, exc_value, exc_traceback)
                return
            root_logger.error(
                "Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)
            )

        sys.excepthook = handle_exception

    def setup_logging(self) -> BoundLogger:
        """
        Sets up logging configuration for the application.

        Returns:
            BoundLogger: The configured logger instance.
        """
        self._configure()
        return structlog.get_logger()
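

# Example: a minimal sketch of how an application entry point might wire this
# logger up. The request_id value and event names below are illustrative only;
# ENVIRONMENT / LOG_FILE_PATH are read from the environment as above.
if __name__ == "__main__":
    logger = Logger(json_logs=False, log_level="DEBUG").setup_logging()

    # merge_contextvars folds any bound context (e.g. a request id) into every entry.
    structlog.contextvars.bind_contextvars(request_id="demo-123")
    logger.info("application started", version="0.1.0")
    logger.warning("disk usage high", percent=91)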