mirror of
https://github.com/grillazz/fastapi-sqlalchemy-asyncpg.git
synced 2026-03-06 10:00:39 +03:00
feat: enhance logging configuration with default values and shared processors
This commit is contained in:
@@ -7,90 +7,80 @@ import orjson
|
|||||||
import structlog
|
import structlog
|
||||||
from whenever._whenever import Instant
|
from whenever._whenever import Instant
|
||||||
|
|
||||||
|
# --- Fallback configuration used when the ROTOGER_* env vars are unset ---
_DEFAULT_LOG_PATH = "."                 # write logs to the current directory
_DEFAULT_MAX_BYTES = 10 * 1024 * 1024   # rotate after 10 MiB
_DEFAULT_BACKUP_COUNT = 5               # keep five rotated files
||||||
# Registry of stdlib loggers to configure. To route another library's
# logs through the shared handler, add its logger name and level here.
_STDLIB_LOGGERS: dict[str, int] = {
    "root": logging.INFO,
    "uvicorn": logging.INFO,
    "sqlalchemy": logging.WARNING,
}
||||||
# Processor chain shared by structlog's own pipeline and the stdlib
# ProcessorFormatter, so native structlog events and foreign stdlib
# records are rendered with an identical structure.
_SHARED_PROCESSORS: list[structlog.types.Processor] = [
    structlog.contextvars.merge_contextvars,
    structlog.stdlib.add_log_level,
    structlog.stdlib.add_logger_name,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="iso", utc=True),
    structlog.processors.format_exc_info,
]
|
def _build_handler() -> RotatingFileHandler:
    """Create a size-rotating file handler that writes JSON log lines.

    The target directory, rotation threshold, and backup count come from the
    ``ROTOGER_LOG_PATH``, ``ROTOGER_LOG_MAX_BYTES``, and
    ``ROTOGER_LOG_BACKUP_COUNT`` environment variables, falling back to the
    module-level defaults when unset. The directory is created if missing.
    """
    directory = Path(os.getenv("ROTOGER_LOG_PATH", _DEFAULT_LOG_PATH))
    directory.mkdir(parents=True, exist_ok=True)

    # One file per day and per process, e.g. "20240101_12345.log".
    stamp = Instant.now().py_datetime().strftime("%Y%m%d")
    target = directory / f"{stamp}_{os.getpid()}.log"

    rotating = RotatingFileHandler(
        filename=target,
        maxBytes=int(os.getenv("ROTOGER_LOG_MAX_BYTES", _DEFAULT_MAX_BYTES)),
        backupCount=int(os.getenv("ROTOGER_LOG_BACKUP_COUNT", _DEFAULT_BACKUP_COUNT)),
        encoding="utf-8",
    )

    # Render foreign (stdlib) records through the shared chain, then dump
    # the event dict as one JSON object per line via orjson.
    formatter = structlog.stdlib.ProcessorFormatter(
        foreign_pre_chain=_SHARED_PROCESSORS,
        processor=structlog.processors.JSONRenderer(
            serializer=lambda *args, **kwargs: orjson.dumps(*args, **kwargs).decode()
        ),
    )
    rotating.setFormatter(formatter)
    return rotating
|
def _configure_logger() -> structlog.BoundLogger:
    """Wire up structlog and the registered stdlib loggers.

    Configures structlog to hand its events to the stdlib
    ``ProcessorFormatter``, then attaches the shared rotating JSON handler
    to every logger listed in ``_STDLIB_LOGGERS`` so all of them emit the
    same structured output.

    Returns:
        A bound structlog logger backed by the configuration above.
    """
    structlog.configure(
        processors=[
            *_SHARED_PROCESSORS,
            # Hand the event dict off to the stdlib formatter for rendering.
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    # Attach one shared handler to each registered logger and disable
    # propagation so a record is never emitted twice.
    shared_handler = _build_handler()
    for logger_name, logger_level in _STDLIB_LOGGERS.items():
        stdlib_logger = logging.getLogger(logger_name)
        stdlib_logger.addHandler(shared_handler)
        stdlib_logger.propagate = False
        stdlib_logger.setLevel(logger_level)

    return structlog.get_logger()
|
# Eagerly built module-level singleton: the first import of this module
# pays the configuration cost once, and get_logger() just returns this.
_logger_instance = _configure_logger()
||||||
def get_logger() -> structlog.BoundLogger:
    """Return the module-wide, already-configured structlog logger."""
    return _logger_instance
|||||||
Reference in New Issue
Block a user